Code example #1
File: demo.py  Project: serialc/BSR_parsers
#  {"rid":"9","bssid":"chicago","feedname":"Divvy JSON","feedurl":"http://www.divvybikes.com/stations/json","feedurl2":"","format":"json","keyreq":"no","parsername":"motivate"}]
# We see that the parsername was specified in the last update.
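# Assumed setup (not part of the original demo): chicago_feeds is taken to be
# the raw JSON string returned by the BSR API for the Chicago system, using
# the same endpoint that test.py queries.
import urllib2
chicago_feeds = urllib2.urlopen(
    "http://bikeshare-research.org/api/v1/categories/data/systems/chicago").read()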

# let's use the last update
import json

chicago_feeds_json = json.loads(chicago_feeds)
chicago_data_feed = chicago_feeds_json[-1]

# Get parsers to easily retrieve data by cloning the BSR_parsers repository locally:
# git clone https://github.com/serialc/BSR_parsers.git
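# Assumption (not in the original demo): if demo.py is not run from inside the
# cloned repository, add the clone's location to the import path so that
# 'bsrp' can be found.
import sys
sys.path.append("BSR_parsers")  # hypothetical path to the local clone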

# load the python parser
from bsrp import BSRParser

# Example 1: Chicago and method chaining
# create instance of parser with data feed details
parser = BSRParser(chicago_data_feed)

# retrieving will return False or, if successful, the parser object (self)
parser.retrieve()

# save cleaned data locally according to the default schema (fullset)
parser.save('')

# save the raw data if you wish
parser.save_raw('')

# retrieve the data in array form, perhaps to insert into DB
data_array = parser.get_data_array()

# or, since most of these methods return the object/self, do it all in one line
data_array = parser.retrieve().save('').save_raw('').get_data_array()
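To illustrate the "insert into DB" idea mentioned above, here is a minimal, hypothetical sketch (not from the repository) that bulk-inserts the rows returned by get_data_array() into SQLite. The database name, the stations table, and the assumption that each row is a flat sequence of values are illustrative only.

import sqlite3

if data_array:
    conn = sqlite3.connect("bss.db")
    # build one "?" placeholder per column in a row
    placeholders = ",".join(["?"] * len(data_array[0]))
    # the target table must already exist with a matching number of columns
    conn.executemany("INSERT INTO stations VALUES (%s)" % placeholders, data_array)
    conn.commit()
    conn.close()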
Code example #2
File: test.py  Project: serialc/BSR_parsers
import sys
import json
import urllib2
from bsrp import BSRParser

# bssid and apikey are assumed to be defined earlier in test.py
try:
    res = urllib2.urlopen("http://bikeshare-research.org/api/v1/categories/data/systems/" + bssid)
except urllib2.URLError:
    print "Couldn't retrieve the URL: either the bssid is incorrect or a connection to the server can't be established."
    sys.exit(1)  # stop here, as res was never assigned

feeds = json.loads(res.read())

if len(feeds) == 0:
    print "No feed is specified for this BSS."
if len(feeds) > 1:
    print "The feed has " + str(len(feeds)) + " parts."

for feed in feeds:
    if feed['parsername'] is not None:
        print "Using parser: " + feed['parsername']
        parser = BSRParser(feed)

        if apikey != '':
            print apikey
            parser.set_apikey(apikey)

        parser.retrieve()
        parser.parse()
        print parser.get_string()

        # save the raw data if it was scraped as a string
        if isinstance(parser.get_raw(), basestring):
            parser.save_raw("")
            print "Saved raw scraped data to " + bssid + "_test_results_raw.txt"

        # save the cleaned data
        parser.save("")
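The test.py excerpt assumes that bssid and apikey are already defined. A hypothetical sketch of one way they might be supplied on the command line follows; the argument handling here is an assumption, not taken from the repository.

import sys

# bssid is required; apikey is optional and defaults to an empty string
bssid = sys.argv[1]
apikey = sys.argv[2] if len(sys.argv) > 2 else ''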
Code example #3
# Python 3 variant of the test above, using the requests library
import sys
import json
import requests
from bsrp import BSRParser

# bssid and apikey are assumed to be defined earlier
try:
    res = requests.get("http://bikeshare-research.org/api/v1/categories/data/systems/" + bssid)
except requests.exceptions.RequestException:
    print(
        "Couldn't retrieve the URL: either the bssid is incorrect or a connection to the server can't be established."
    )
    sys.exit(1)  # stop here, as res was never assigned

feeds = json.loads(res.text)

if len(feeds) == 0:
    print("No feed is specified for this BSS.")
if len(feeds) > 1:
    print("The feed has " + str(len(feeds)) + " parts.")

for feed in feeds:
    if feed['parsername'] is not None:
        print("Using parser: " + feed['parsername'])
        parser = BSRParser(feed)

        if apikey != '':
            print(apikey)
            parser.set_apikey(apikey)

        parser.retrieve()
        parser.parse()
        stns = parser.get_data_array()
        # test reuse of the parsed stations
        if stns:
            # stns is not False, so retrieval and parsing succeeded
            for stn in stns:
                # print each row of the data array
                try:
                    print(stn)