def main(directory, force=False):
    WebServiceApplication.cached_wadl = None  # do not use cached file version
    execute_zcml_for_scripts()
    config = getUtility(IWebServiceConfiguration)

    # First, create an index.html with links to all the HTML
    # documentation files we're about to generate.
    template_file = "apidoc-index.pt"
    template = PageTemplateFile(template_file)
    index_filename = os.path.join(directory, "index.html")
    print "Writing index:", index_filename
    f = open(index_filename, "w")
    f.write(template(config=config))
    f.close()

    # Get the time of the last commit.  We will use this as the mtime for the
    # generated files so that we can safely use it as part of Apache's etag
    # generation in the face of multiple servers/filesystems.
    with bzrlib.initialize():
        branch = Branch.open(os.path.dirname(os.path.dirname(__file__)))
        timestamp = branch.repository.get_revision(branch.last_revision()).timestamp

    # Start a process to build each set of WADL and HTML files.
    processes = []
    for version in config.active_versions:
        p = Process(target=make_files, args=(directory, version, timestamp, force))
        p.start()
        processes.append(p)

    # Wait for all the subprocesses to finish.
    for p in processes:
        p.join()

    return 0
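A minimal sketch of how a command-line wrapper might call this main(); the option name and usage string below are assumptions for illustration, not part of the original script.

# Hypothetical entry point (names and options are assumed, not original).
if __name__ == '__main__':
    import sys
    from optparse import OptionParser

    parser = OptionParser(usage="%prog [options] OUTPUT_DIRECTORY")
    parser.add_option("--force", action="store_true", default=False,
                      help="Regenerate files even if they already exist.")
    options, args = parser.parse_args()
    if len(args) != 1:
        parser.error("A single output directory is required.")
    sys.exit(main(args[0], force=options.force))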
Example No. 2
def main():
    parser = OptionParser()
    parser.add_option("-r",
                      "--raw",
                      action="store_true",
                      default=False,
                      help="Output full raw data")
    parser.add_option("-f",
                      "--full",
                      action="store_true",
                      default=False,
                      help="Output individual memcached server stats.")
    parser.add_option("-c",
                      "--cricket",
                      action="store_true",
                      default=False,
                      help="Output stats in cricket compatible format.")
    options, args = parser.parse_args()
    if len(args) > 0:
        parser.error("Too many arguments.")
    execute_zcml_for_scripts()
    all_raw_stats = getUtility(IMemcacheClient).get_stats()
    if options.raw:
        pprint(all_raw_stats)
    elif options.cricket:
        print_cricket(all_raw_stats)
    elif options.full:
        print_summary(all_raw_stats)
        print_full(all_raw_stats)
    else:
        print_summary(all_raw_stats)
    return 0
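The print_summary, print_full and print_cricket helpers are not shown in this example; below is a hypothetical print_summary, assuming get_stats() returns the usual python-memcached shape of (server, {stat_name: value}) pairs.

# Hypothetical helper (assumes a list of (server, stats_dict) pairs).
def print_summary(all_raw_stats):
    totals = {'curr_items': 0, 'bytes': 0, 'get_hits': 0, 'get_misses': 0}
    for server, stats in all_raw_stats:
        for key in totals:
            totals[key] += int(stats.get(key, 0))
    for key in sorted(totals):
        print '%s: %s' % (key, totals[key])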
Example No. 3
    def __enter__(cls):
        def handler(signum, frame):
            # We raise an exception **and** schedule a call to exit the
            # process hard.  This is because we cannot rely on the exception
            # being raised during useful code.  Sometimes, it will be raised
            # while the reactor is looping, which means that it will be
            # ignored.
            #
            # If the exception is raised during the actual job, then we'll get
            # a nice traceback indicating what timed out, and that will be
            # logged as an OOPS.
            #
            # Regardless of where the exception is raised, we'll hard exit the
            # process and have a TimeoutError OOPS logged, although that will
            # have a crappy traceback. See the job_raised callback in
            # TwistedJobRunner.runJobInSubprocess for the other half of that.
            reactor.callFromThread(reactor.callLater, 0, os._exit,
                                   TwistedJobRunner.TIMEOUT_CODE)
            raise TimeoutError

        scripts.execute_zcml_for_scripts(use_web_security=False)
        signal(SIGHUP, handler)
        dbconfig.override(dbuser=cls.dbuser, isolation_level='read_committed')
        # XXX wgrant 2011-09-24 bug=29744: initZopeless used to do this.
        # Should be removed from callsites verified to not need it.
        set_immediate_mail_delivery(True)
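The matching __exit__ is not part of this example; a plausible counterpart would simply restore the default SIGHUP handler, roughly as sketched below (hypothetical, not the original implementation).

# Hypothetical __exit__ counterpart (assumption; not shown in the source).
from signal import SIG_DFL, SIGHUP, signal


def __exit__(cls, exc_type, exc_value, traceback):
    signal(SIGHUP, SIG_DFL)
    return False  # never suppress exceptions raised inside the block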
Example No. 4
def __enter__(cls):
    def handler(signum, frame):
        # We raise an exception **and** schedule a call to exit the
        # process hard.  This is because we cannot rely on the exception
        # being raised during useful code.  Sometimes, it will be raised
        # while the reactor is looping, which means that it will be
        # ignored.
        #
        # If the exception is raised during the actual job, then we'll get
        # a nice traceback indicating what timed out, and that will be
        # logged as an OOPS.
        #
        # Regardless of where the exception is raised, we'll hard exit the
        # process and have a TimeoutError OOPS logged, although that will
        # have a crappy traceback. See the job_raised callback in
        # TwistedJobRunner.runJobInSubprocess for the other half of that.
        reactor.callFromThread(
            reactor.callLater, 0, os._exit, TwistedJobRunner.TIMEOUT_CODE)
        raise TimeoutError

    scripts.execute_zcml_for_scripts(use_web_security=False)
    signal(SIGHUP, handler)
    dbconfig.override(dbuser=cls.dbuser, isolation_level='read_committed')
    # XXX wgrant 2011-09-24 bug=29744: initZopeless used to do this.
    # Should be removed from callsites verified to not need it.
    set_immediate_mail_delivery(True)
Example No. 5
def main():
    parser = OptionParser()
    parser.add_option(
        "-r", "--raw", action="store_true", default=False,
        help="Output full raw data")
    parser.add_option(
        "-f", "--full", action="store_true", default=False,
        help="Output individual memcached server stats.")
    parser.add_option(
        "-c", "--cricket", action="store_true", default=False,
        help="Output stats in cricket compatible format.")
    options, args = parser.parse_args()
    if len(args) > 0:
        parser.error("Too many arguments.")
    execute_zcml_for_scripts()
    all_raw_stats = getUtility(IMemcacheClient).get_stats()
    if options.raw:
        pprint(all_raw_stats)
    elif options.cricket:
        print_cricket(all_raw_stats)
    elif options.full:
        print_summary(all_raw_stats)
        print_full(all_raw_stats)
    else:
        print_summary(all_raw_stats)
    return 0
Example No. 6
def main(directory, force=False):
    WebServiceApplication.cached_wadl = None  # do not use cached file version
    execute_zcml_for_scripts()
    config = getUtility(IWebServiceConfiguration)

    # First, create an index.html with links to all the HTML
    # documentation files we're about to generate.
    template_file = 'apidoc-index.pt'
    template = PageTemplateFile(template_file)
    index_filename = os.path.join(directory, "index.html")
    print "Writing index:", index_filename
    f = open(index_filename, 'w')
    f.write(template(config=config))
    f.close()

    # Get the time of the last commit.  We will use this as the mtime for the
    # generated files so that we can safely use it as part of Apache's etag
    # generation in the face of multiple servers/filesystems.
    with bzrlib.initialize():
        branch = Branch.open(os.path.dirname(os.path.dirname(__file__)))
        timestamp = branch.repository.get_revision(
            branch.last_revision()).timestamp

    # Start a process to build each set of WADL and HTML files.
    processes = []
    for version in config.active_versions:
        p = Process(target=make_files,
            args=(directory, version, timestamp, force))
        p.start()
        processes.append(p)

    # Wait for all the subprocesses to finish.
    for p in processes:
        p.join()

    return 0
Example No. 7
def ensure_zcml():
    """Ensure the zcml has been executed for the current process."""
    global needs_zcml
    if not needs_zcml:
        return
    transaction.abort()
    scripts.execute_zcml_for_scripts(use_web_security=False)
    set_immediate_mail_delivery(True)
    needs_zcml = False
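ensure_zcml() relies on a module-level needs_zcml flag that the snippet does not show; a minimal sketch of the assumed surrounding state and a typical call site:

# Assumed module-level flag (illustrative; the real module is not shown).
needs_zcml = True

# Typical call site: the function is idempotent, so any code path that may
# run first in a fresh process can call it defensively.
ensure_zcml()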
Example No. 8
def ensure_zcml():
    """Ensure the zcml has been executed for the current process."""
    global needs_zcml
    if not needs_zcml:
        return
    transaction.abort()
    scripts.execute_zcml_for_scripts(use_web_security=False)
    set_immediate_mail_delivery(True)
    needs_zcml = False
Example No. 9
def _get_locals():
    if len(sys.argv) > 1:
        dbuser = sys.argv[1]
    else:
        dbuser = None
    dbconfig.override(dbuser=dbuser)
    execute_zcml_for_scripts()
    readline.parse_and_bind('tab: complete')
    # Mimic the real interactive interpreter's loading of any
    # $PYTHONSTARTUP file.
    startup = os.environ.get('PYTHONSTARTUP')
    if startup:
        execfile(startup)
    store = IMasterStore(Person)

    if dbuser == 'launchpad':
        # Create a few variables "in case they come in handy."
        # Do we really use these?  Are they worth carrying around?
        d = Distribution.get(1)
        p = Person.get(1)
        ds = DistroSeries.get(1)
        prod = Product.get(1)
        proj = ProjectGroup.get(1)
        b2 = Bug.get(2)
        b1 = Bug.get(1)
        s = Specification.get(1)
        q = Question.get(1)
        # Silence unused name warnings
        d, p, ds, prod, proj, b2, b1, s, q

    # Having a factory instance is handy.
    factory = LaunchpadObjectFactory()

    def browser_open(obj, *args, **kwargs):
        """Open a (possibly newly-created) object's view in a web browser.

        Accepts the same parameters as canonical_url.

        Performs a commit before invoking the browser, so
        "browser_open(factory.makeFoo())" works.
        """
        transaction.commit()
        webbrowser.open(canonical_url(obj, *args, **kwargs))

    # Silence unused name warnings
    factory, store

    res = {}
    res.update(locals())
    res.update(globals())
    del res['_get_locals']
    return res
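A plausible consumer of such a helper is an interactive shell seeded with its return value, e.g. via code.interact; this driver is an assumption for illustration, not code from the original module.

# Hypothetical driver (assumption): start a REPL with _get_locals() bindings.
import code

if __name__ == '__main__':
    code.interact(banner='Launchpad interactive shell', local=_get_locals())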
Example No. 10
def _get_locals():
    if len(sys.argv) > 1:
        dbuser = sys.argv[1]
    else:
        dbuser = None
    dbconfig.override(dbuser=dbuser)
    execute_zcml_for_scripts()
    readline.parse_and_bind('tab: complete')
    # Mimic the real interactive interpreter's loading of any
    # $PYTHONSTARTUP file.
    startup = os.environ.get('PYTHONSTARTUP')
    if startup:
        execfile(startup)
    store = IMasterStore(Person)

    if dbuser == 'launchpad':
        # Create a few variables "in case they come in handy."
        # Do we really use these?  Are they worth carrying around?
        d = Distribution.get(1)
        p = Person.get(1)
        ds = DistroSeries.get(1)
        prod = Product.get(1)
        proj = ProjectGroup.get(1)
        b2 = Bug.get(2)
        b1 = Bug.get(1)
        s = Specification.get(1)
        q = Question.get(1)
        # Silence unused name warnings
        d, p, ds, prod, proj, b2, b1, s, q

    # Having a factory instance is handy.
    factory = LaunchpadObjectFactory()

    def browser_open(obj, *args, **kwargs):
        """Open a (possibly newly-created) object's view in a web browser.

        Accepts the same parameters as canonical_url.

        Performs a commit before invoking the browser, so
        "browser_open(factory.makeFoo())" works.
        """
        transaction.commit()
        webbrowser.open(canonical_url(obj, *args, **kwargs))

    # Silence unused name warnings
    factory, store

    res = {}
    res.update(locals())
    res.update(globals())
    del res['_get_locals']
    return res
Example No. 11
def main():
    """Print all stacked branches from the database.

    See the module docstring for more information.
    """
    parser = OptionParser(
        description="List the stacked branches in Launchpad.")
    parser.parse_args()

    execute_zcml_for_scripts()
    for db_branch in get_stacked_branches():
        stacked_on = db_branch.stacked_on
        print '%s %s %s %s %s' % (db_branch.id, db_branch.branch_type.name,
                                  db_branch.unique_name, stacked_on.id,
                                  stacked_on.unique_name)
Example No. 12
def main():
    """Print all stacked branches from the database.

    See the module docstring for more information.
    """
    parser = OptionParser(
        description="List the stacked branches in Launchpad.")
    parser.parse_args()

    execute_zcml_for_scripts()
    for db_branch in get_stacked_branches():
        stacked_on = db_branch.stacked_on
        print '%s %s %s %s %s' % (
            db_branch.id, db_branch.branch_type.name, db_branch.unique_name,
            stacked_on.id, stacked_on.unique_name)
Example No. 13
#! /usr/bin/python -S
#
# Copyright 2010 Canonical Ltd.  This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).

"""Perform simple librarian operations to verify the current configuration.
"""

import _pythonpath

import sys

from zope.component import getUtility

from lp.services.librarian.interfaces.client import (
    ILibrarianClient,
    IRestrictedLibrarianClient,
    )
from lp.services.librarian.smoketest import do_smoketest
from lp.services.scripts import execute_zcml_for_scripts


if __name__ == '__main__':
    execute_zcml_for_scripts()
    restricted_client = getUtility(IRestrictedLibrarianClient)
    regular_client = getUtility(ILibrarianClient)
    sys.exit(do_smoketest(restricted_client, regular_client))
Example No. 14
"""Generate a preamble for slonik(1) scripts based on the current LPCONFIG.
"""

__metaclass__ = type
__all__ = []

import _pythonpath

from optparse import OptionParser
import time

from lp.services import scripts
from lp.services.config import config
from lp.services.database.sqlbase import connect
import replication.helpers

if __name__ == '__main__':
    parser = OptionParser()
    scripts.db_options(parser)
    (options, args) = parser.parse_args()
    if args:
        parser.error("Too many arguments")
    scripts.execute_zcml_for_scripts(use_web_security=False)

    con = connect()
    print '# slonik(1) preamble generated %s' % time.ctime()
    print '# LPCONFIG=%s' % config.instance_name
    print
    print replication.helpers.preamble(con)
Example No. 15
"""Generate a preamble for slonik(1) scripts based on the current LPCONFIG.
"""

__metaclass__ = type
__all__ = []

import _pythonpath

from optparse import OptionParser
import time

from lp.services import scripts
from lp.services.config import config
from lp.services.database.sqlbase import connect
import replication.helpers


if __name__ == '__main__':
    parser = OptionParser()
    scripts.db_options(parser)
    (options, args) = parser.parse_args()
    if args:
        parser.error("Too many arguments")
    scripts.execute_zcml_for_scripts(use_web_security=False)

    con = connect()
    print '# slonik(1) preamble generated %s' % time.ctime()
    print '# LPCONFIG=%s' % config.instance_name
    print
    print replication.helpers.preamble(con)
Example No. 16
settings work. Note we need to use a non-default isolation level to
confirm that the changes are actually being made by the API calls."""

__metaclass__ = type
__all__ = []

import warnings

import transaction

from lp.services.config import dbconfig
from lp.services.database.sqlbase import cursor
from lp.services.scripts import execute_zcml_for_scripts
from lp.testing.layers import disconnect_stores

execute_zcml_for_scripts()


def check():
    cur = cursor()
    cur.execute("UPDATE Person SET homepage_content='foo' WHERE name='mark'")
    cur.execute("SHOW transaction_isolation")
    print cur.fetchone()[0]

    transaction.abort()
    transaction.begin()

    cur = cursor()
    cur.execute("UPDATE Person SET homepage_content='bar' WHERE name='mark'")
    cur.execute("SHOW transaction_isolation")
    print cur.fetchone()[0]
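The snippet imports dbconfig and disconnect_stores without using them, so the part of the script that actually switches the isolation level is evidently not shown; a hedged sketch of what that missing driver presumably does:

# Hypothetical driver (assumption based on the unused imports above): flip
# the isolation level through the config API, reconnect, then re-run check().
# 'serializable' is just one example of a non-default level.
check()
dbconfig.override(isolation_level='serializable')
disconnect_stores()
check()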
Example No. 17
def _init_zca(self, use_web_security):
    """Initialize the ZCA; this can be overridden for testing purposes."""
    scripts.execute_zcml_for_scripts(use_web_security=use_web_security)
Example No. 18
def _init_zca(self, use_web_security):
    """Initialize the ZCA; this can be overridden for testing purposes."""
    scripts.execute_zcml_for_scripts(use_web_security=use_web_security)
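As the docstring says, _init_zca exists so tests can override it; a minimal sketch of such a test double, with a made-up class name:

# Hypothetical test double (the class name is an assumption for illustration).
class NoZCAScriptMixin:

    def _init_zca(self, use_web_security):
        """Skip ZCML loading; the test layer sets up the registry itself."""
        pass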