def parse_and_build_pipe(context, json_pipe, pipe_name="anonymous"):
    """Parse a pipe definition and build the runnable pipeline.

    context   -- execution Context forwarded to build_pipe
    json_pipe -- pipe definition handed to _parse_pipe (exact accepted
                 form depends on _parse_pipe, which is not visible here)
    pipe_name -- label for the pipe (default "anonymous")

    Returns build_pipe's result -- presumably an iterable pipeline,
    judging by the consumers elsewhere in this file (TODO confirm).
    """
    pipe = _parse_pipe(json_pipe, pipe_name)
    pb = build_pipe(context, pipe)
    return pb


if __name__ == "__main__":
    # Prefer the stdlib json module; fall back to simplejson on old
    # interpreters where json is missing or lacks the loads attribute.
    try:
        import json
        json.loads  # test access to the attributes of the right json module
    except (ImportError, AttributeError):
        import simplejson as json

    context = Context()
    pjson = []

    # Command-line interface.
    # NOTE(review): this chunk appears truncated -- options/args are set
    # up here but consumed past the end of this view.
    usage = "usage: %prog [options] [filename]"
    parser = OptionParser(usage=usage)
    parser.add_option("-p", "--pipe", dest="pipeid",
                      help="read pipe JSON from Yahoo", metavar="PIPEID")
    parser.add_option("-s", dest="savejson",
                      help="save pipe JSON to file", action="store_true")
    parser.add_option("-v", dest="verbose",
                      help="set verbose debug", action="store_true")
    (options, args) = parser.parse_args()

    name = "anonymous"
    filename = None
    if len(args):
        filename = args[0]
    context.verbose = options.verbose
# NOTE(review): fragment -- the enclosing
# "def pipe_125e9fe8bb5f84526d21bebfec3ad116(...)" header (and the code
# producing sw_142) begin before this view, so the statements below are
# rendered at their original function-body indentation.

    # Copy each item's language.0.content value into its description field.
    sw_351 = pipe_rename(
        context, sw_142,
        conf={'RULE': [{'field': {'type': 'text',
                                  'value': 'language.0.content'},
                        'op': {'type': 'text', 'value': 'copy'},
                        'newval': {'type': 'text',
                                   'value': 'description'}}]})
    _OUTPUT = pipe_output(context, sw_351, conf={})
    # NOTE(review): returning sw_142 leaves _OUTPUT (and hence the rename
    # above) as dead code -- the sibling pipeline in this file ends with
    # "return _OUTPUT". Confirm this is not meant to be "return _OUTPUT".
    return sw_142


if __name__ == "__main__":
    pipeline = pipe_125e9fe8bb5f84526d21bebfec3ad116(Context())
    for i in pipeline:
        print i
def getpjsonFromDB(id): scraperwiki.sqlite.attach( 'pipe2py_test' ) q = '* FROM "pipes" WHERE "id"="'+id+'"' data = scraperwiki.sqlite.select(q) #print data pipe_def = json.loads(data[0]['pjson']) if not pipe_def['query']['results']: print "Pipe not found" sys.exit(1) pjson = pipe_def['query']['results']['json']['PIPE']['working'] return pjson pjson=getpjsonFromDB(pipeid) p = compile.parse_and_build_pipe(Context(), pjson) for i in p: #print 'as',i print '<a href="'+i['link']+'">'+i['title']+'</a><br/>',i['summary_detail']['value']+'<br/><br/>' import scraperwiki,json from pipe2py import compile, Context import cgi, os qstring=os.getenv("QUERY_STRING") if qstring!=None: get = dict(cgi.parse_qsl(qstring)) if 'pipeid' in get: pipeid=get['pipeid'] else: pipeid='2de0e4517ed76082dcddf66f7b218057' else: pipeid='2de0e4517ed76082dcddf66f7b218057'
# todo: insert pipeline description here conf = conf or {} if context and context.describe_input: return [(u'', u'gid', u'MP Guardian ID', u'text', u'1276')] if context and context.describe_dependencies: return [u'pipefetchdata', u'pipeoutput', u'pipetextinput', u'pipeurlbuilder'] forever = pipe_forever() sw_606 = pipe_textinput( context, forever, conf={'debug': {'type': 'text', 'value': ''}, 'default': {'type': 'text', 'value': '1276'}, 'prompt': {'type': 'text', 'value': 'MP Guardian ID'}, 'name': {'type': 'text', 'value': 'gid'}, 'position': {'type': 'number', 'value': ''}}) sw_579 = pipe_urlbuilder( context, forever, _1_PATH=sw_606, conf={'PATH': [{'terminal': '1_PATH', 'type': 'text'}, {'type': 'text', 'value': 'json'}], 'BASE': {'type': 'text', 'value': ''}, 'PARAM': [{'value': {'type': 'text', 'value': ''}, 'key': {'type': 'text', 'value': ''}}]}) sw_610 = pipe_fetchdata( context, forever, URL=sw_579, conf={'URL': {'terminal': 'URL', 'type': 'url'}, 'path': {'type': 'text', 'value': 'person'}}) _OUTPUT = pipe_output( context, sw_610, conf={}) return _OUTPUT if __name__ == "__main__": pipeline = pipe_bd0834cfe6cdacb0bea5569505d330b8(Context()) for i in pipeline: print i