Example #1
#########################################
# Atom feed of events from one scraper
#########################################
from scraperwiki.apiwrapper import getKeys, getData
import re

sourcescraper = "swale-democratic-services-events-diary"

limit = 30
offset = 0

keys = getKeys(sourcescraper)             # column names in this scraper
data = getData(sourcescraper, 1, offset)  # first row only, for the feed metadata

# regexes to pull the event link (the url with its uid) and, roughly, the
# name inside the <strong> tag out of each row's html
ure = re.compile(".*(http.*uid=[0-9]*)", re.DOTALL)
nre = re.compile(".*strong.(.*strong)", re.DOTALL)
allre = re.compile(".*", re.DOTALL)  # fallback: matches anything

# feed metadata: use the first row's datetime, with "Z" appended to mark UTC
updated = ""
if data:
    updated = "%s%s" % (data[0].get('datetime'), "Z")

print """<?xml version="1.0" encoding="utf-8"?>

<feed xmlns='http://www.w3.org/2005/Atom'>
    <title type='text'>Swale Democratic Services Calendar</title>
    <subtitle type='html'>Open Swale</subtitle>
    <updated>%s</updated>
    <id>http://scraperwikiviews.com/run/feed/?</id>
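# The original view is truncated at this point. A plausible completion sketch:
# emit one Atom <entry> per row, pulling the event link and name out of the
# row's html with the regexes above, then close the feed. The column names
# 'htmlblock' and 'datetime' are assumptions about what this scraper exposes.
for row in getData(sourcescraper, limit, offset):
    html = row.get('htmlblock') or ""        # assumed column holding raw html
    umatch = ure.match(html)
    nmatch = nre.match(html)
    link = umatch and umatch.group(1) or ""
    name = nmatch and nmatch.group(1) or ""
    print """    <entry>
        <title type='text'>%s</title>
        <link href='%s'/>
        <id>%s</id>
        <updated>%sZ</updated>
    </entry>""" % (name, link, link, row.get('datetime'))

print "</feed>"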
#########################################
# Simple table of values from one scraper
#########################################
from scraperwiki.apiwrapper import getKeys, getData

sourcescraper = "datagovuk-catalogue"

limit = 20
offset = 0

all_keys = getKeys(sourcescraper)
# show the most useful columns first, then the remaining ones alphabetically
keys = ['title', 'name', 'department', 'agency', 'notes', 'url', 'resources']
keys += sorted(set(all_keys) - set(keys))

print '<h2>Some data from scraper: %s  (%d columns)</h2>' % (sourcescraper,
                                                             len(keys))
print '<table border="1" style="border-collapse:collapse;">'

# column headings
print "<tr>",
for key in keys:
    print "<th>%s</th>" % key,
print "</tr>"

# rows
for row in getData(sourcescraper, limit, offset):
    print "<tr>",
    for key in keys:
        print "<td>%s</td>" % row.get(key),
    print "</tr>"
#########################################
# Simple table of values from one scraper
#########################################
from scraperwiki.apiwrapper import getKeys, getData

sourcescraper = "cost_of_gov_websites"

limit = 20
offset = 0

keys = getKeys(sourcescraper)
keys.sort()  # alphabetically

print '<h2>Some data from scraper: %s  (%d columns)</h2>' % (sourcescraper, len(keys))
print '<table border="1" style="border-collapse:collapse;">'

# column headings
print "<tr>",
for key in keys:
    print "<th>%s</th>" % key,
print "</tr>"

# rows
for row in getData(sourcescraper, limit, offset):
    print "<tr>",
    for key in keys:
        print "<td>%s</td>" % row.get(key),
    print "</tr>"
    
print "</table>"