def micropub(data, db, log, siteConfigFilename):
    """Handle a Micropub request described by the ``data`` dict.

    Only the 'create' event with h=entry is supported.  On success the post
    metadata is stored in redis under a ``micropub::<ts>::<slug>`` key, a
    matching event is pushed onto the 'kaku-events' list and an update is
    published on the 'kaku' channel for the site builder to pick up.

    :param data: parsed micropub request; expects keys 'event', 'h',
                 'content', 'baseroute', 'baseurl'
    :param db: redis-like client (set / rpush / publish)
    :param log: logger used for error reporting
    :param siteConfigFilename: path to the site's JSON config file
    :return: a (message, http-status, headers) tuple
    """
    # yes, I know, it's a module global...
    cfg = Config()
    if os.path.exists(siteConfigFilename):
        cfg.fromJson(siteConfigFilename)
    try:
        if data['event'] == 'create':
            if 'h' in data:
                if data['h'].lower() not in ('entry',):
                    return ('Micropub CREATE requires a valid action parameter', 400, {})
                else:
                    try:
                        utcdate = datetime.datetime.utcnow()
                        tzLocal = pytz.timezone('America/New_York')
                        timestamp = tzLocal.localize(utcdate, is_dst=None)
                        # first line of the content doubles as the title;
                        # fall back to a time-based name for empty posts
                        if 'content' in data and data['content'] is not None:
                            title = data['content'].split('\n')[0]
                        else:
                            title = 'event-%s' % timestamp.strftime('%H%M%S')
                        slug = createSlug(title)
                        year = str(timestamp.year)
                        doy = timestamp.strftime('%j')
                        location = os.path.join(data['baseroute'], year, doy, slug)
                        filename = os.path.join(cfg.paths.content, year, doy, '%s.md' % slug)
                        if os.path.exists(filename):
                            # fixed: was a bare 2-tuple; include the headers
                            # dict so every exit path unpacks the same way
                            return ('Micropub CREATE failed, location already exists', 406, {})
                        else:
                            mdata = {
                                'slug': slug,
                                'timestamp': timestamp.strftime('%Y-%m-%d %H:%M:%S'),
                                'location': '%s%s' % (data['baseurl'], location),
                                'year': year,
                                'doy': doy,
                                'micropub': data,
                                'siteConfig': cfg,
                            }
                            key = 'micropub::%s::%s' % (timestamp.strftime('%Y%m%d%H%M%S'), slug)
                            event = {
                                'type': 'micropub',
                                'key': key,
                            }
                            db.set(key, json.dumps(mdata))
                            db.rpush('kaku-events', json.dumps(event))
                            db.publish('kaku', 'update')
                            return ('Micropub CREATE successful for %s' % location, 202, {'Location': location})
                    except Exception:
                        log.exception('Exception during micropub handling')
                        return ('Micropub CREATE failed', 500, {})
            else:
                return ('Invalid Micropub CREATE request', 400, {})
        else:
            return ('Unable to process Micropub %s' % data['event'], 400, {})
    except Exception:
        # fixed: was a bare ``except: pass`` that fell through to these two
        # statements; narrow the clause and handle the failure in place
        traceback.print_exc()
        return ('Unable to process Micropub', 400, {})
def handleEmbed():
    """Flask view answering an oEmbed request for one of our own posts.

    Reads ``url``, ``format``, ``maxwidth`` and ``maxheight`` from the query
    string, maps the target URL to the post's JSON metadata file on disk and
    returns an oEmbed 'rich' payload as JSON (default) or XML.
    """
    app.logger.info('handleEmbed')
    targetURL = request.args.get('url')
    responseFormat = request.args.get('format')
    maxWidth = request.args.get('maxwidth')
    maxHeight = request.args.get('maxheight')
    if responseFormat is None:
        responseFormat = 'json'
    responseFormat = responseFormat.lower()
    if targetURL is None:
        return 'invalid url', 404
    else:
        siteCfg = Config()
        if os.path.exists(cfg.site_config):
            siteCfg.fromJson(cfg.site_config)
        # map the public URL to the post's metadata file under paths.content
        url = urlparse(targetURL)
        targetRoute = url.path.replace(siteCfg.baseroute, '')
        if targetRoute.endswith('.html'):
            targetRoute = targetRoute[:-5]
        targetFile = os.path.join(siteCfg.paths.content, '%s.json' % targetRoute)
        # fixed: debug output went to stdout via bare print statements;
        # route it through the app logger instead
        app.logger.debug('embed target %s -> route %s file %s' % (targetURL, targetRoute, targetFile))
        # load all known mentions for the target
        if os.path.exists(targetFile):
            with open(targetFile, 'r') as h:
                post = json.load(h)
            thumbUrl, thumbWidth = findThumbnail(maxWidth, maxHeight)
            data = {
                "version": "1.0",
                "type": "rich",
                "author_name": post['author'],
                "author_url": siteCfg.baseurl,
                "provider_name": baseDomain(siteCfg.baseurl, includeScheme=False),
                "provider_url": siteCfg.baseurl,
                "title": post['title'],
                "thumbnail_url": '%s%s' % (siteCfg.baseurl, thumbUrl),
                "thumbnail_width": thumbWidth,
                # NOTE(review): height reuses thumbWidth -- findThumbnail only
                # returns (url, width), so square thumbnails are assumed; confirm
                "thumbnail_height": thumbWidth,
                "target": targetURL
            }
            data['html'] = _embed_html % data
            if responseFormat == 'json':
                return jsonify(data)
            else:
                return Response(_xml_response % data, mimetype='text/xml')
        else:
            # fixed: previously fell off the end and returned None (Flask
            # error); report the unknown target explicitly
            return 'unknown url', 404
def loadConfig(configFilename, host=None, port=None, logpath=None):
    """Build a Config from *configFilename*, applying overrides and defaults.

    Explicit host/port/logpath arguments always win over file values;
    auth_timeout, require_vouch and our_domain are filled in when absent.
    """
    settings = Config()
    settings.fromJson(configFilename)
    # caller-supplied values override whatever the file contained
    for key, value in (('host', host), ('port', port)):
        if value is not None:
            setattr(settings, key, value)
    if logpath is not None:
        # the log path lives under the nested paths section
        settings.paths.log = logpath
    # defaults for optional settings the file may omit
    if 'auth_timeout' not in settings:
        settings.auth_timeout = 300
    if 'require_vouch' not in settings:
        settings.require_vouch = False
    if 'our_domain' not in settings:
        settings.our_domain = baseDomain(settings.client_id, includeScheme=False)
    return settings
def loadConfig(configFilename, host=None, port=None, basepath=None, logpath=None):
    """Load the JSON config, filling gaps from arguments and defaults.

    Unlike a plain override, a caller-supplied value is applied only when
    the file did not already set that key -- the file wins.
    """
    cfg = Config()
    cfg.fromJson(configFilename)
    # apply argument fallbacks for keys the file left unset
    for name, value in (('host', host),
                        ('port', port),
                        ('basepath', basepath),
                        ('logpath', logpath)):
        if value is not None and name not in cfg:
            setattr(cfg, name, value)
    # hard defaults for optional settings
    for name, default in (('auth_timeout', 300), ('require_vouch', False)):
        if name not in cfg:
            setattr(cfg, name, default)
    return cfg
def loadConfig(configFilename, host=None, port=None, basepath=None, logpath=None):
    """Read *configFilename* into a Config and top it up with fallbacks.

    File values take precedence; an argument fills a key only when the file
    omitted it.  auth_timeout and require_vouch get hard defaults.
    """
    conf = Config()
    conf.fromJson(configFilename)
    # argument fallbacks -- skipped whenever the file already set the key
    if "host" not in conf and host is not None:
        conf.host = host
    if "port" not in conf and port is not None:
        conf.port = port
    if "basepath" not in conf and basepath is not None:
        conf.basepath = basepath
    if "logpath" not in conf and logpath is not None:
        conf.logpath = logpath
    # fixed defaults for optional settings
    if "auth_timeout" not in conf:
        conf.auth_timeout = 300
    if "require_vouch" not in conf:
        conf.require_vouch = False
    return conf
def loadConfig(configFilename, host=None, port=None, logpath=None):
    """Load a Config from JSON, overriding host/port/log path when given.

    Ensures auth_timeout is always present (default 300 seconds).
    """
    loaded = Config()
    loaded.fromJson(configFilename)
    # explicit arguments replace whatever came from the file
    if host is not None:
        loaded.host = host
    if port is not None:
        loaded.port = port
    if logpath is not None:
        # log location is kept under the nested paths config
        loaded.paths.log = logpath
    # guarantee a sane auth timeout even if the file omitted one
    if 'auth_timeout' not in loaded:
        loaded.auth_timeout = 300
    return loaded
def mention(sourceURL, targetURL, db, log, siteConfigFilename, vouchDomain=None, vouchRequired=False):
    """Process the Webmention of the targetURL from the sourceURL.

    To verify that the sourceURL has indeed referenced our targetURL we run
    findMentions() at it and scan the resulting href list.  When verified
    (and vouched, if vouching is required) the mention data is stored in
    redis under a ``webmention::<ts>::<target>`` key and a matching event is
    pushed onto the 'kaku-events' list.

    :param sourceURL: the page claiming to mention us
    :param targetURL: our post being mentioned
    :param db: redis-like client (set / rpush)
    :param log: logger
    :param siteConfigFilename: path to the site's JSON config file
    :param vouchDomain: domain offered as a vouch, if any
    :param vouchRequired: when True, an unvouched mention is rejected
    :return: (result, vouched) booleans
    """
    cfg = Config()
    if os.path.exists(siteConfigFilename):
        cfg.fromJson(siteConfigFilename)
    log.info('discovering Webmention endpoint for %s' % sourceURL)
    mentions = ronkyuu.findMentions(sourceURL)
    result = False
    vouched = False
    log.info('mentions %s' % mentions)
    for href in mentions['refs']:
        if href != sourceURL and href == targetURL:
            log.info('post at %s was referenced by %s' % (targetURL, sourceURL))
            if vouchRequired:
                if vouchDomain is None:
                    # vouch demanded but none offered: reject outright
                    vouched = False
                    result = False
                else:
                    vouched = processVouch(cfg.paths.content, sourceURL, targetURL, vouchDomain)
                    result = vouched
            else:
                vouched = False
                result = True
            if result:
                utcdate = datetime.datetime.utcnow()
                tzLocal = pytz.timezone('America/New_York')
                timestamp = tzLocal.localize(utcdate, is_dst=None)
                # parse the source page's microformats to capture author info
                mf2Data = Parser(doc=mentions['content']).to_dict()
                hcard = extractHCard(mf2Data)
                data = {
                    'sourceURL': sourceURL,
                    'targetURL': targetURL,
                    'vouchDomain': vouchDomain,
                    'vouched': vouched,
                    'postDate': timestamp.strftime('%Y-%m-%dT%H:%M:%S'),
                    'hcard': hcard,
                    'mf2data': mf2Data,
                    'siteConfig': cfg,
                }
                key = 'webmention::%s::%s' % (timestamp.strftime('%Y%m%d%H%M%S'), targetURL)
                event = {
                    'type': 'webmention',
                    'key': key,
                }
                # removed: a large block of commented-out legacy code that
                # wrote mention files to disk instead of using redis
                db.set(key, json.dumps(data))
                db.rpush('kaku-events', json.dumps(event))
    log.info('mention() returning %s' % result)
    return result, vouched
return False else: print "do something" if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("--config", default="./kaku_events.cfg") parser.add_argument("--domain", default=None, help="The domain to receive the micropub actions.") parser.add_argument( "--tokenFile", default=None, help="The file to retrieve the authentication token to use for micropub actions." ) args = parser.parse_args() cfgFiles = findConfigFile(args.config) cfg = Config() if len(cfgFiles) > 0 and os.path.exists(cfgFiles[0]): cfg.fromJson(cfgFiles[0]) logHandler = logging.StreamHandler() logFormatter = logging.Formatter("%(asctime)s %(levelname)-9s %(message)s", "%Y-%m-%d %H:%M:%S") logHandler.setFormatter(logFormatter) logger.addHandler(logHandler) logger.setLevel(logging.DEBUG) domain = args.domain tokenFile = args.tokenFile if domain is None and type(cfg.baseurl) is str: domain = cfg.baseurl
parser = argparse.ArgumentParser() parser.add_argument('--config', default='./kaku_events.cfg') parser.add_argument('--file', default=None, help='A specific markdown file to check and then exit') parser.add_argument( '--force', default=False, action='store_true', help= 'Force any found markdown files (or specific file) to be considered an update.' ) args = parser.parse_args() cfgFiles = findConfigFile(args.config) cfg = Config() cfg.fromJson(cfgFiles[0]) initLogging(cfg.paths.log, cfg.logname) logger.info('kaku_events started') db = getRedis(cfg.redis) with open(os.path.join(cfg.paths.templates, cfg.templates.markdown)) as h: mdPost = h.read() with open(os.path.join(cfg.paths.templates, cfg.templates.embed)) as h: metaEmbed = h.read() if args.file is not None: gather(cfg.paths.content, args.file, args.force) else:
def micropub(data, db, log, siteConfigFilename):
    """Handle a Micropub request described by the ``data`` dict.

    Only the 'create' event with h=entry is supported.  On success the post
    metadata is stored in redis under a ``micropub::<ts>::<slug>`` key, a
    matching event is pushed onto the 'kaku-events' list and an update is
    published on the 'kaku' channel for the site builder to pick up.

    :param data: parsed micropub request; expects keys 'event', 'h',
                 'content', 'baseroute', 'baseurl'
    :param db: redis-like client (set / rpush / publish)
    :param log: logger used for error reporting
    :param siteConfigFilename: path to the site's JSON config file
    :return: a (message, http-status, headers) tuple
    """
    # yes, I know, it's a module global...
    cfg = Config()
    if os.path.exists(siteConfigFilename):
        cfg.fromJson(siteConfigFilename)
    try:
        if data['event'] == 'create':
            if 'h' in data:
                if data['h'].lower() not in ('entry',):
                    return ('Micropub CREATE requires a valid action parameter', 400, {})
                else:
                    try:
                        utcdate = datetime.datetime.utcnow()
                        tzLocal = pytz.timezone('America/New_York')
                        timestamp = tzLocal.localize(utcdate, is_dst=None)
                        # first line of the content doubles as the title;
                        # fall back to a time-based name for empty posts
                        if 'content' in data and data['content'] is not None:
                            title = data['content'].split('\n')[0]
                        else:
                            title = 'event-%s' % timestamp.strftime('%H%M%S')
                        slug = createSlug(title)
                        year = str(timestamp.year)
                        doy = timestamp.strftime('%j')
                        location = os.path.join(data['baseroute'], year, doy, slug)
                        filename = os.path.join(cfg.paths.content, year, doy, '%s.md' % slug)
                        if os.path.exists(filename):
                            # fixed: was a bare 2-tuple; include the headers
                            # dict so every exit path unpacks the same way
                            return ('Micropub CREATE failed, location already exists', 406, {})
                        else:
                            mdata = {
                                'slug': slug,
                                'timestamp': timestamp.strftime('%Y-%m-%d %H:%M:%S'),
                                'location': '%s%s' % (data['baseurl'], location),
                                'year': year,
                                'doy': doy,
                                'micropub': data,
                                'siteConfig': cfg,
                            }
                            key = 'micropub::%s::%s' % (timestamp.strftime('%Y%m%d%H%M%S'), slug)
                            event = {
                                'type': 'micropub',
                                'key': key,
                            }
                            db.set(key, json.dumps(mdata))
                            db.rpush('kaku-events', json.dumps(event))
                            db.publish('kaku', 'update')
                            return ('Micropub CREATE successful for %s' % location, 202, {'Location': location})
                    except Exception:
                        log.exception('Exception during micropub handling')
                        return ('Micropub CREATE failed', 500, {})
            else:
                return ('Invalid Micropub CREATE request', 400, {})
        else:
            return ('Unable to process Micropub %s' % data['event'], 400, {})
    except Exception:
        # fixed: was a bare ``except: pass`` that fell through to these two
        # statements; narrow the clause and handle the failure in place
        traceback.print_exc()
        return ('Unable to process Micropub', 400, {})
result['body'] = o.body result['user'] = getUser(o.user) result['created_at'] = getDate(o.created_at) result['updated_at'] = getDate(o.updated_at) return result if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('-c', '--config', default='./archive.cfg') parser.add_argument('-i', '--issues', action='store_true') parser.add_argument('-o', '--org') parser.add_argument('-r', '--repo') args = parser.parse_args() cfg = Config() cfg.fromJson(args.config) if cfg.auth_token is None: error('Unable to load configuration file %s' % args.config) else: gh = Github(cfg.auth_token) org = gh.get_organization(args.org) repo = org.get_repo(args.repo) if repo is not None: print('scanning', repo.name) data = {} if args.issues: data['issues'] = [] for issue in repo.get_issues(state="all"):