def setUp(self):
    """Install the shared test application and (re)create the schema.

    The Flask app is built lazily and cached in the module-level ``_app``
    so every test case in the run reuses a single application instance.
    """
    global _app
    if _app is None:
        # Default to an in-memory SQLite DB unless the environment
        # points at a dedicated test database.
        db_uri = os.environ.get("TEST_DATABASE_URL", 'sqlite:///:memory:')
        _app = create_app(SQLALCHEMY_DATABASE_URI=db_uri)
    self.app = _app
    # Opt-in SQL statement logging, toggled via the SA_ECHO env var.
    if os.environ.get("SA_ECHO", "False") == "True":
        db.engine.echo = True
    create_db()
def setUp(self):
    # Reuse one application object across the whole suite: building the
    # app is expensive, so it is cached in the module global ``_app``.
    global _app
    if _app is None:
        _app = create_app(
            SQLALCHEMY_DATABASE_URI=os.environ.get(
                "TEST_DATABASE_URL",
                'sqlite:///:memory:',
            ),
        )
    self.app = _app
    echo_flag = os.environ.get("SA_ECHO", "False")
    if echo_flag == "True":
        # Echo emitted SQL for debugging when explicitly requested.
        db.engine.echo = True
    create_db()
import os
import codecs
import logging
import datetime as dt

import requests
from flask.ext.script import Manager
from sqlalchemy import not_

from iatilib.frontend import create_app
from iatilib import parse, codelists, model, db, redis
from iatilib.crawler import manager as crawler_manager
from iatilib.queue import manager as queue_manager

# Management CLI: sub-commands are grouped under "crawl" and "queue".
manager = Manager(create_app(DEBUG=False))
manager.add_command("crawl", crawler_manager)
manager.add_command("queue", queue_manager)


@manager.shell
def make_shell_context():
    """Objects pre-loaded into the interactive `shell` command."""
    return dict(
        app=manager.app,
        db=db,
        rdb=redis,
        model=model,
        codelists=codelists)


@manager.command
def download_codelists():
    "Download CSV codelists from IATI"
indexed_resource.state = READY db.session.commit() except etree.XMLSyntaxError, e: error = 'Download Failed: Invalid XML: %s' % (unicode(e)) print error indexed_resource.logerrors.append(LogError(text=error,level=2)) indexed_resource.state = BROKEN except IOError, e: error = 'Download Failed: Bad URL? %s' % (unicode(e)) print error indexed_resource.logerrors.append(LogError(text=error,level=2)) indexed_resource.state = BROKEN except Exception as e: error = 'Download Failed: Uncaught Exception: %s' % (unicode(e)) print error indexed_resource.logerrors.append(LogError(text=error,level=2)) indexed_resource.state = BROKEN # Retry on the next pass # Send new objects to the database db.session.commit() if __name__=='__main__': from iatilib.frontend import create_app argparser = argparse.ArgumentParser(description='') argparser.add_argument('-d', '--debug', type=int, dest='debug_limit', help='Debug: Limit the number of records to be handled in this sweep.') argparser.add_argument('-v', '--verbose', action='store_true', help='Verbose mode') arg = argparser.parse_args() app = create_app() download_loop(debug_limit=arg.debug_limit,verbose=arg.verbose)
import os import codecs import logging import datetime as dt import requests from flask.ext.script import Manager from iatilib.frontend import create_app from iatilib import parse, codelists, model, db, redis from iatilib.crawler import manager as crawler_manager from iatilib.queue import manager as queue_manager manager = Manager(create_app(DEBUG=True)) manager.add_command("crawl", crawler_manager) manager.add_command("queue", queue_manager) @manager.shell def make_shell_context(): return dict( app=manager.app, db=db, rdb=redis, model=model, codelists=codelists) @manager.command def download_codelists():
import os import codecs import logging import datetime as dt import requests from flask.ext.script import Manager from sqlalchemy import not_ from iatilib.frontend import create_app from iatilib import parse, codelists, model, db, redis from iatilib.crawler import manager as crawler_manager from iatilib.queue import manager as queue_manager manager = Manager(create_app(DEBUG=False)) manager.add_command("crawl", crawler_manager) manager.add_command("queue", queue_manager) @manager.shell def make_shell_context(): return dict( app=manager.app, db=db, rdb=redis, model=model, codelists=codelists) @manager.command def download_codelists():
def setUp(self):
    """Build a fresh app backed by an in-memory database for each test."""
    self.app = create_app(SQLALCHEMY_DATABASE_URI='sqlite:///:memory:')
    create_db()
from iatilib.frontend import create_app

# WSGI entry point: the object a WSGI server imports as ``app``.
app = create_app()

try:
    # Wrap the application with New Relic instrumentation when the
    # agent package is available; otherwise run unwrapped.
    import newrelic.agent
    app = newrelic.agent.wsgi_application()(app)
except ImportError:
    pass
import os
import codecs
import logging
import datetime as dt

import requests
from flask.ext.script import Manager
from sqlalchemy import not_

from iatilib.frontend import create_app
from iatilib import parse, codelists, model, db, redis
from iatilib.crawler import manager as crawler_manager
from iatilib.queue import manager as queue_manager

# Management CLI (debug build); crawler and queue commands are
# registered as sub-managers.
manager = Manager(create_app(DEBUG=True))
manager.add_command("crawl", crawler_manager)
manager.add_command("queue", queue_manager)


@manager.shell
def make_shell_context():
    """Names made available inside the interactive management shell."""
    return dict(app=manager.app, db=db, rdb=redis, model=model,
                codelists=codelists)


@manager.command
def download_codelists():
    "Download CSV codelists from IATI"
import sqlalchemy as sa import prettytable from flask.ext.script import Manager from iatilib.frontend import create_app, db from iatilib import magic_numbers from iatilib.model import IndexedResource, RawXmlBlob manager = Manager(create_app()) def qtable(itr, headers=None): if headers is None: headers = next(itr) tbl = prettytable.PrettyTable(headers) for row in itr: tbl.add_row(row) return tbl @manager.command def status(): print "Database: %s" % manager.app.config["SQLALCHEMY_DATABASE_URI"] print resource_status = db.session.query( IndexedResource.state, sa.func.count()).group_by(IndexedResource.state) print "Download" print qtable(
except Exception: db.session.rollback() print >>sys.stderr, "Could not parse xml blob id=%s" % xmlblob.id traceback.print_exc() xmlblob.parsed = False db.session.commit() if fail_fast: raise parsed += 1 if (debug_limit is not None) and parsed >= debug_limit: return if __name__ == '__main__': from iatilib.frontend import create_app create_app() argparser = argparse.ArgumentParser(description='') argparser.add_argument('-d', '--debug', type=int, dest='debug_limit', help='Debug: Limit the number of activities to be handled in this sweep.') argparser.add_argument('-v', '--verbose', action='store_true', help='Verbose mode') argparser.add_argument( '--fail-fast', action='store_true', help='Terminate if parser hits an error') arg = argparser.parse_args() parse_loop( debug_limit=arg.debug_limit, verbose=arg.verbose, fail_fast=arg.fail_fast)