Example #1
def main():
    """handle user command when run as top level program"""
    import sys
    from optparse import OptionParser, make_option

    usage = 'usage: %prog [options] event_files (default "events.xml")'
    version = "%prog 0.0 alpha"
    option_list = [
      make_option('-f','--fifo',metavar='FIFO',default='event.fifo',help='write events to FIFO (default "event.fifo")'),
      make_option('-z','--gunzip',action='store_true',help='unzip input file while reading'),
      make_option('-d','--debug_out',metavar='debug_out',default=None,help='If set, a file with events that were read'),
    ]
    parser = OptionParser(usage=usage,version=version,option_list=option_list)
    (opt,args) = parser.parse_args()

    if opt.debug_out is not None:
        try:
            debug_out = open(opt.debug_out, 'wb')
        except IOError:
            print 'Failure to open debug file requested for event source.\n'
            debug_out = None
    else:
        debug_out = None

    if not args:
        args = ["events.xml"]

    for arg in args:
        if arg == '-':
            events_file = sys.stdin
        elif opt.gunzip:
            import gzip
            events_file = gzip.open(arg, "r")
        else:
            events_file = open(arg, "r")
        write_events(opt.fifo, events_file,debug_out)
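
A minimal sketch of how an option list like this parses, with a hypothetical argv (write_events and the event files are not needed to exercise the parser itself):

from optparse import OptionParser, make_option

option_list = [
    make_option('-f', '--fifo', default='event.fifo'),
    make_option('-z', '--gunzip', action='store_true'),
]
parser = OptionParser(option_list=option_list)
opt, args = parser.parse_args(['-z', 'big_events.xml.gz'])
print(opt.fifo)    # 'event.fifo' -- default kept, dest derived from --fifo
print(opt.gunzip)  # True
print(args)        # ['big_events.xml.gz'] -- leftover positionals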
Example #2
def getProgOptions():
    from optparse import OptionParser, make_option
    option_list = [
        make_option("-i", "--in-seq",
        action="append", type="string",dest="inSeq"),
        make_option("-o", "--out-name",
        action="store", type="string",dest="outName"),
        make_option("-s", "--num-splits",
        action="store", type="int",dest="nSplits",default=3),
        make_option("-m", "--min-samp-count",
        action="store", type="int",dest="minSampCount",default=100),
        make_option("-t", "--max-samp-seq",
        action="store", type="int",dest="maxSampCountPerSeq"),
        make_option("-l", "--samp-len",
        action="store", type="int",dest="sampLen",default=1000),
        make_option("-f", "--samp-offset",
        action="store", type="int",dest="sampOffset",default=0),
        make_option("-d", "--make-other",
        action="store_true", dest="makeOther",default=False),
        make_option("-a", "--alphabet",
        action="store", type="choice",choices=("dna","protein"),
        dest="alphabet",default="dna"),
        make_option("-e", "--degen-len",
        action="store", type="int",dest="degenLen",default=1),
    ]
    parser = OptionParser(usage = "usage: %prog [options]",option_list=option_list)
    (options, args) = parser.parse_args()

    return options,args
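
The type="choice" option above is validated by optparse itself; any alphabet outside the listed choices aborts with a parse error. A small sketch of just that option in isolation:

from optparse import OptionParser, make_option

parser = OptionParser(option_list=[
    make_option("-a", "--alphabet", action="store", type="choice",
                choices=("dna", "protein"), dest="alphabet", default="dna"),
])
options, args = parser.parse_args(["-a", "protein"])
print(options.alphabet)  # 'protein'
# parser.parse_args(["-a", "rna"]) would exit with:
#   error: option -a: invalid choice: 'rna' (choose from 'dna', 'protein')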
Example #3
def options():
    return [
        op.make_option("-a", "--admin", metavar="USER:PASS", default=None,
            help="Add an admin account to the development cluster"),
        op.make_option("-n", "--nodes", metavar="N", default=DEFAULT_N,
            type="int", help="Number of development nodes to be spun up")
    ]
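
Functions like this return a bare list of Option objects so the caller can assemble the parser itself; here `op` is presumably optparse imported under an alias and DEFAULT_N a module constant. A hedged sketch of the consuming side, reusing the options() above with DEFAULT_N stubbed in:

import optparse as op

DEFAULT_N = 3  # stand-in for the module constant assumed above

parser = op.OptionParser(option_list=options())
opts, args = parser.parse_args(["-n", "5"])
print(opts.nodes)  # 5 -- an int, thanks to type="int"
print(opts.admin)  # None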
Example #4
def configuration_options():
    return [
        optparse.make_option(
            "-t",
            "--target",
            default=config.Config(executive.Executive(), filesystem.FileSystem()).default_configuration(),
            dest="configuration",
            help="(DEPRECATED) (default: %default)",
        ),
        optparse.make_option(
            "--debug", action="store_const", const="Debug", dest="configuration", help="Set the configuration to Debug"
        ),
        optparse.make_option(
            "--release",
            action="store_const",
            const="Release",
            dest="configuration",
            help="Set the configuration to Release",
        ),
        optparse.make_option(
            "--32-bit",
            action="store_const",
            const="x86",
            default=None,
            dest="architecture",
            help="use 32-bit binaries by default (x86 instead of x86_64)",
        ),
    ]
Example #5
 def __init__(self):
     options = [
         make_option("--committer-minimum", action="store", dest="committer_minimum", type="int", default=10, help="Specify minimum patch count for Committer nominations."),
         make_option("--reviewer-minimum", action="store", dest="reviewer_minimum", type="int", default=80, help="Specify minimum patch count for Reviewer nominations."),
         make_option("--show-commits", action="store_true", dest="show_commits", default=False, help="Show commit history with nomination suggestions."),
     ]
     super(SuggestNominations, self).__init__(options=options)
Example #6
 def __init__(self):
     options = [
         steps.Options.cc,
         steps.Options.component,
         make_option(
             "--no-prompt",
             action="store_false",
             dest="prompt",
             default=True,
             help="Do not prompt for bug title and comment; use commit log instead.",
         ),
         make_option(
             "--no-review",
             action="store_false",
             dest="review",
             default=True,
             help="Do not mark the patch for review.",
         ),
         make_option(
             "--request-commit",
             action="store_true",
             dest="request_commit",
             default=False,
             help="Mark the patch as needing auto-commit after review.",
         ),
     ]
     AbstractDeclarativeCommand.__init__(self, options=options)
Example #7
 def __init__(self, options=None): # Default values should never be collections (like []) as default values are shared between invocations
     options_list = (options or []) + [
         make_option("--no-confirm", action="store_false", dest="confirm", default=True, help="Do not ask the user for confirmation before running the queue.  Dangerous!"),
         make_option("--exit-after-iteration", action="store", type="int", dest="iterations", default=None, help="Stop running the queue after iterating this number of times."),
     ]
     Command.__init__(self, "Run the %s" % self.name, options=options_list)
     self._iteration_count = 0
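
The comment above is about Python's mutable-default-argument pitfall: a default like options=[] is evaluated once, so every call that mutates it sees the leftovers of previous calls. A minimal illustration with hypothetical names:

def bad_init(options=[]):       # shared default -- the trap the comment warns about
    options.append("extra")
    return options

print(bad_init())  # ['extra']
print(bad_init())  # ['extra', 'extra'] -- state leaked between invocations

def good_init(options=None):    # the pattern the code above actually uses
    return (options or []) + ["extra"]

print(good_init())  # ['extra']
print(good_init())  # ['extra'] -- a fresh list every call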
Example #8
 def __init__(self):
     options = [
         make_option(
             "-b",
             "--bug-id",
             action="store",
             type="string",
             dest="bug_id",
             help="Specify bug id if no URL is provided in the commit log.",
         ),
         make_option(
             "--add-log-as-comment",
             action="store_true",
             dest="add_log_as_comment",
             default=False,
             help="Add commit log message as a comment when uploading the patch.",
         ),
         make_option(
             "-m",
             "--description",
             action="store",
             type="string",
             dest="description",
             help="Description string for the attachment (default: description from commit message)",
         ),
         steps.Options.obsolete_patches,
         steps.Options.review,
         steps.Options.request_commit,
     ]
     AbstractDeclarativeCommand.__init__(self, options=options, requires_local_commits=True)
Example #9
 def __init__(self):
     options = [
         make_option(
             "--bug-id",
             action="store",
             type="string",
             dest="bug_id",
             help="Specify bug id if no URL is provided in the commit log.",
         ),
         make_option(
             "--comment", action="store", type="string", dest="comment", help="Text to include in bug comment."
         ),
         make_option(
             "--open",
             action="store_true",
             default=False,
             dest="open_bug",
             help="Open bug in default web browser (Mac only).",
         ),
         make_option(
             "--update-only",
             action="store_true",
             default=False,
             dest="update_only",
             help="Add comment to the bug, but do not close it.",
         ),
     ]
     AbstractDeclarativeCommand.__init__(self, options=options)
Example #10
File: lol-clt.py Project: jsachs/lol-clt
def main():
    """Runs program and handles command line options"""
    option_list = [
        optparse.make_option("-g", "--game",
                    action="store_true", dest="game",
                    help="displays most recent game info for a given summoner"),
        optparse.make_option("-s", "--stats",
                    action="store_true", dest="stats",
                    help="displays stats for a given summoner")
    ]
    p = optparse.OptionParser(description=' Displays LoL game information',
                              prog='lol-clt',
                              version='lol-clt 0.1',
                              option_list=option_list,
                              usage="usage: %prog [option] <summoner>"
    )
    options, arguments = p.parse_args()

    if len(arguments) == 1:
        if options.game:
            info = LGI(arguments[0])
            info.print_current_game_info()
            return  
        if options.stats:
            info = LGI(arguments[0])
            info.print_unranked_summoner_stats()
            return
    p.print_help()  
Example #11
 def __init__(self, *args, **kwargs):
     self.content_types = [
         ContentType.objects.get(app_label='discovery', model='device'),
         ContentType.objects.get(app_label='business', model='venture'),
         ContentType.objects.get(app_label='business', model='venturerole'),
         ContentType.objects.get(app_label='discovery', model='datacenter'),
         ContentType.objects.get(app_label='discovery', model='network'),
         # not used as for now.
         # ContentType.objects.get(app_label='discovery',
         # model='networkterminator'),
     ]
     self.actions = ['purge', 'import']
     self.kinds = [
         'ci', 'user-relations', 'all-relations', 'system-relations'
     ]
     self.option_list = []
     self.option_list.extend(BaseCommand.option_list)
     self.option_list.extend([
         make_option(
             '--action', dest='action', help="Purge all CI and Relations."
         ),
         make_option(
             '--kind', dest='kind', help="Choose import kind.",
         ),
         make_option(
             '--ids', dest='ids',
             help="Choose ids to import.",
         ),
         make_option(
             '--content-types', dest='content_types',
             help="Type of content to reimport.",
             default=[],
         )
     ])
Example #12
File: dsdl.py Project: Trilliant/pyang
 def add_opts(self, optparser):
     optlist = [
         optparse.make_option("--dsdl-no-documentation",
                              dest="dsdl_no_documentation",
                              action="store_true",
                              default=False,
                              help="No output of DTD compatibility"
                              " documentation annotations"),
         optparse.make_option("--dsdl-no-dublin-core",
                              dest="dsdl_no_dublin_core",
                              action="store_true",
                              default=False,
                              help="No output of Dublin Core"
                              " metadata annotations"),
         optparse.make_option("--dsdl-record-defs",
                              dest="dsdl_record_defs",
                              action="store_true",
                              default=False,
                              help="Record all top-level defs"
                              " (even if not used)"),
         optparse.make_option("--dsdl-lax-yang-version",
                              dest="dsdl_lax_yang_version",
                              action="store_true",
                              default=False,
                              help="Try to translate modules with "
                              "unsupported YANG versions (use at own risk)"),
         ]
     g = optparser.add_option_group("Hybrid DSDL schema "
                                    "output specific options")
     g.add_options(optlist)
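
pyang plugins attach their options to a named group so --help clusters them under one heading; the group changes only the help layout, not where parsed values land. A standalone sketch of the same add_option_group / add_options pattern:

import optparse

parser = optparse.OptionParser()
g = parser.add_option_group("Hybrid DSDL schema output specific options")
g.add_options([
    optparse.make_option("--dsdl-record-defs", dest="dsdl_record_defs",
                         action="store_true", default=False,
                         help="Record all top-level defs (even if not used)"),
])
opts, args = parser.parse_args(["--dsdl-record-defs"])
print(opts.dsdl_record_defs)  # True -- values still land on the parser's options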
Example #13
    def __new__(cls, *args, **kwargs):
        """
        Sets option_list and help dynamically.
        """
        # instantiate
        obj = super(BaseTenantCommand, cls).__new__(cls, *args, **kwargs)

        app_name = get_commands()[obj.COMMAND_NAME]
        if isinstance(app_name, BaseCommand):
            # If the command is already loaded, use it directly.
            cmdclass = app_name
        else:
            cmdclass = load_command_class(app_name, obj.COMMAND_NAME)

        # inherit the options from the original command
        obj.option_list = cmdclass.option_list
        #print obj.option_list
        obj.option_list += (
            make_option("-s", "--schema", dest="schema_name"),
            )
        obj.option_list += (
            make_option("-p", "--skip-public", dest="skip_public", action="store_true", default=False),
            )

        # prepend the command's original help with the info about schemata iteration
        obj.help = ("Calls %s for all registered schemata. You can use regular %s options. "
                    "Original help for %s: %s"
                    % (obj.COMMAND_NAME, obj.COMMAND_NAME, obj.COMMAND_NAME,
                       getattr(cmdclass, 'help', 'none')))
        return obj
Example #14
def main(config=None, timing=None, wrapping=True):
  """"""
  #define the options
  usage = "usage: %prog [options]"
  version = "%prog 0.2.6"
  options = [
    make_option("-c","--config",default="config.xml",help="The configuration file name"),
    make_option("-t","--timing",default=None,help="Output timing (information in % increments for scoring)")]
  parser = OptionParser(usage=usage, version=version, option_list=options)
  
  #parse the options
  if not config:
    (options, arguments) = parser.parse_args()
    config = options.config
    timing = options.timing
  
  #make a producer
  mine = Producer(timing, wrapping)
  #input the configurations
  mine.inputConfigs(config)
  #input the PMML
  mine.inputPMML()
  #create the statistics
  mine.getStats()
  #make the TestDistributions elements
  mine.makeTests()
  #output the PMML
  mine.outputPMML()
  #stop timing if it is going
  mine.stop()
Example #15
def make_options():
    g_settings = make_settings(ignore=("version",))

    keys = g_settings.keys()
    def sorter(k):
        return (g_settings[k].section, g_settings[k].order)

    opts = [
        make_option('--adminmedia', dest='admin_media_path', default='',
        help='Specifies the directory from which to serve admin media.')
    ]

    for k in keys:
        setting = g_settings[k]
        if not setting.cli:
            continue

        args = tuple(setting.cli)

        kwargs = {
            "dest": setting.name,
            "metavar": setting.meta or None,
            "action": setting.action or "store",
            "type": setting.type or "string",
            "default": None,
            "help": "%s [%s]" % (setting.short, setting.default)
        }
        if kwargs["action"] != "store":
            kwargs.pop("type")

        opts.append(make_option(*args, **kwargs))

    return tuple(opts)
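
The kwargs.pop("type") above is load-bearing: optparse refuses, at Option construction time, a type for an action that takes no value. A quick demonstration of the error it avoids:

import optparse

try:
    optparse.make_option("--flag", action="store_true", type="string", dest="flag")
except optparse.OptionError as e:
    print(e)  # option --flag: must not supply a type for action 'store_true'

# dropping the type, as make_options() does, constructs cleanly
flag_opt = optparse.make_option("--flag", action="store_true", dest="flag")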
Example #16
def print_options():
    return [
        optparse.make_option(
            "-q",
            "--quiet",
            action="store_true",
            default=False,
            help="run quietly (errors, warnings, and progress only)",
        ),
        optparse.make_option(
            "-v",
            "--verbose",
            action="store_true",
            default=False,
            help="print a summarized result for every test (one line per test)",
        ),
        optparse.make_option(
            "--details", action="store_true", default=False, help="print detailed results for every test"
        ),
        optparse.make_option(
            "--debug-rwt-logging",
            action="store_true",
            default=False,
            help="print timestamps and debug information for run-webkit-tests itself",
        ),
    ]
Example #17
File: sid.py Project: cabo/yang-cbor
    def add_opts(self, optparser):
        optlist = [
            optparse.make_option("--sid-help",
                                 dest="sid_help",
                                 action="store_true",
                                 help="Print help on automatic SID generation"),
            optparse.make_option("--generate-sid-file",
                                 action="store",
                                 type="string",
                                 dest="generate_sid_file",
                                 help="Generate a .sid file."),
            optparse.make_option("--update-sid-file",
                                 action="store",
                                 type="string",
                                 dest="update_sid_file",
                                 help="Generate a .sid file based on a previous .sid file."),
            optparse.make_option("--check-sid-file",
                                 action="store",
                                 type="string",
                                 dest="check_sid_file",
                                 help="Check the consistency between a .sid file and the .yang file(s)."),
            optparse.make_option("--list-sid",
                                 action="store_true",
                                 dest="list_sid",
                                 help="Print the list of SID."),
            ]

        g = optparser.add_option_group("SID file specific options")
        g.add_options(optlist)
Example #18
def main(argv=None):

    # this allows us to call main from the interpreter
    if argv is None:
        argv = sys.argv

    # Process options
    parser = OptionParser(
        usage = "usage: %prog [<options>] module ...",
        version = Version,
        option_list=[
        make_option('-x',"--excludes", help="A list of modules to be excluded. It should be a single string with the modules separated by blanks.",
                    action="store", dest="exclude", default=''),
        make_option("-d","--dir", help="Destination directory for the output.",
                    action="store", dest="dir", default='.'),
        ])
    options, args = parser.parse_args()

    print options.exclude.split()

    pydoc2.PackageDocumentationGenerator(
        baseModules = args,
        exclusions = options.exclude.split(),
        destinationDirectory = options.dir,
        ).process ()
Example #19
def test_consume_nargs_and_options():
    from optparse import make_option

    @tasks.task
    @tasks.consume_nargs(2)
    @tasks.cmdopts([
        make_option("-f", "--foo", help="foo")
    ])
    def t1(options):
        assert options.foo == "1"
        assert options.t1.foo == "1"
        assert options.args == ['abc', 'def']

    @tasks.task
    @tasks.consume_nargs(2)
    @tasks.cmdopts([
        make_option("-f", "--foo", help="foo")
    ])
    def t2(options):
        assert options.foo == "2"
        assert options.t2.foo == "2"
        assert options.args == ['ghi', 'jkl']


    environment = _set_environment(t1=t1, t2=t2)
    tasks._process_commands([
        't1', '--foo', '1', 'abc', 'def',
        't2', '--foo', '2', 'ghi', 'jkl',
    ])
    assert t1.called
Example #20
File: swagger.py Project: qooqle/COP
    def add_opts(self, optparser):
        # A list of command line options supported by the swagger plugin.
        # TODO: which options are really needed?
        optlist = [
            optparse.make_option(
                '--swagger-help',
                dest='swagger_help',
                action='store_true',
                help='Print help on swagger options and exit'),
            optparse.make_option(
                '--swagger-depth',
                type='int',
                dest='swagger_depth',
                default=5,
                help='Number of levels to print'),
            optparse.make_option(
                '--simplify-api',
                action='store_true',
                default=False,
                dest='s_api',
                help='Simplified apis'),
            optparse.make_option(
                '--swagger-path',
                dest='swagger_path',
                type='string',
                help='Path to print')]
        optgrp = optparser.add_option_group('Swagger specific options')
        optgrp.add_options(optlist)
Example #21
 def add_opts(self, optparser):
     optlist = [
         optparse.make_option("--meta-only",
                              dest="meta_only",
                              action="store_true",
                              help="""Only produce documentation based on the
                              module metadata"""),
         optparse.make_option("--doc-format",
                              dest="doc_format",
                              action="store",
                              type="string",
                              default="markdown",
                              help="""Doc output format: markdown, html"""),
         optparse.make_option("--strip-ns",
                              dest="strip_namespace",
                              action="store_true",
                              help="""Strip namespace prefixes from
                              displayed paths"""),
         optparse.make_option("--no-structure",
                              dest="no_structure",
                              action="store_true",
                              help="""Do not generate docs for structure-only nodes (e.g., containers)"""),
         optparse.make_option("--doc-title",
                              dest="doc_title",
                              action="store",
                              type="string",
                              help="""Set the title of the output documentation page"""),
     ]
     g = optparser.add_option_group("docs output specific options")
     g.add_options(optlist)
Example #22
def options():
    return [
        op.make_option(
            "--remote-host",
            dest="remote_host",
            default="localhost",
            help="Hostname of the couchdb-lucene server. [%default]",
        ),
        op.make_option(
            "--remote-port",
            dest="remote_port",
            type="int",
            default=5985,
            help="Port of the couchdb-lucene server. [%default]",
        ),
        op.make_option(
            "--local-host", dest="local_host", default="localhost", help="Hostname of this couchdb instance. [%default]"
        ),
        op.make_option(
            "--local-port",
            dest="local_port",
            type="int",
            default=5984,
            help="Port of this couchdb instance. [%default]",
        ),
    ]
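
The [%default] markers in these help strings are expanded by optparse's help formatter when help is printed. A small sketch reusing one of the options above:

import optparse as op

parser = op.OptionParser(option_list=[
    op.make_option("--remote-port", dest="remote_port", type="int", default=5985,
                   help="Port of the couchdb-lucene server. [%default]"),
])
parser.print_help()
# the --remote-port entry renders as:
#   Port of the couchdb-lucene server. [5985]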
Example #23
def run():

    epilog = "Available commands are: %s" % ', '.join(sorted(COMMANDS))
    usage='%prog command [options] [args]'

    available_options = [
        make_option('-u', '--username', 
            dest='username', default=User.DEFAULT_USERNAME, help="specifies a username (default is %s)" % User.DEFAULT_USERNAME),
        make_option('-f', '--fetch',
            dest='fetch_data', action='store_true', help='fetches each feed data after import'),
        make_option('-p', '--port', default='8080', 
            dest='port', type='int', help='specifies the port to serve on (default 8080)'),
        make_option('-r', '--allow-remote-access', action='store_true', dest='allow_remote_access', help='binds to 0.0.0.0 instead of localhost'),
    ]
        
    parser = OptionParser(option_list=available_options, usage=usage, epilog=epilog)
     
    command_options, args = parser.parse_args()
    if not args:
        parser.error('no command given, use the -h option for a list of available commands')
        
    command_name, command_args = args[0].lower(), args[1:]

    cc = CommandController()
    try:
        cc.run_command(command_name, command_options, command_args)
    except CommandError, ex:
        parser.error(ex)
Example #24
File: base.py Project: esodan/icbuild
 def __init__(self):
     Command.__init__(
         self,
         [
             make_option(
                 "-c", "--clean", action="store_true", dest="clean", default=False, help="clean before building"
             ),
             make_option(
                 "-n",
                 "--no-network",
                 action="store_true",
                 dest="nonetwork",
                 default=False,
                 help="skip version control update",
             ),
             make_option(
                 "-f",
                 "--force",
                 action="store_true",
                 dest="force_policy",
                 default=False,
                 help="build even if policy says not to",
             ),
             make_option(
                 "-m",
                 "--arch",
                 action="store",
                 dest="arch",
                 default="Win32",
                 choices=["Win32", "x64"],
                 help="build for a specific architecture",
             ),
         ],
     )
Example #25
 def __init__(self, context=None):
     super(ManifestGener, self).__init__(context)
     self.option_list.extend([
         make_option("-o", "--output-dir", dest="output", default="manifest_output",
                     help="the output folder, default is manifest_output"),
         make_option("-p", "--platform", dest="platforms", default=[], action="append",
                     help="Platform: Galileo, Minnow..")])
Example #26
 def setUp(self):
     options = [
         make_option("-a", type="int", nargs=2, dest="a"),
         make_option("-b", action="store_true", dest="b"),
         make_option("-c", "--callback", action="callback", callback=self.variable_args, dest="c"),
     ]
     self.parser = InterceptingOptionParser(usage=SUPPRESS_USAGE, option_list=options)
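
The variable_args callback referenced above is not shown. optparse callbacks always receive (option, opt_str, value, parser); a variable-argument callback in the style of the optparse documentation consumes parser.rargs until the next thing that looks like an option (in the test above it would be a method, taking self first):

def variable_args(option, opt_str, value, parser):
    # collect following arguments until something resembling the next option
    collected = []
    for arg in parser.rargs:
        if arg.startswith("-") and len(arg) > 1:
            break
        collected.append(arg)
    del parser.rargs[:len(collected)]
    setattr(parser.values, option.dest, collected)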
Example #27
def main(paths=None):
    """handle user command when run as top level program"""
    try:
        configdir = __setprojpath.CONFIGDIR
    except (NameError, AttributeError):
        configdir = 'config'
    try:
        pmmldir = __setprojpath.PMMLDIR
    except (NameError, AttributeError):
        pmmldir = 'models'
    try:
        bootdef = __setprojpath.CONFIGDIR + '/bootstrapConfig'
    except (NameError, AttributeError):
        bootdef = 'config/bootstrapConfig'
    from optparse import OptionParser, make_option

    usage = 'usage: %prog [options]'
    version = "%prog 0.0 alpha"
    option_list = [
      make_option('-b','--bootstrap',metavar='bootstrapConfig',default=bootdef,help='Configuration file used to specify connection info for db (default "config/bootstrapConfig")'),
      make_option('-z','--zero',metavar='zero',default=None,action="store_true",help='If set, creates empty table. Default is not set.'),
    ]


    parser = OptionParser(usage=usage,version=version,option_list=option_list)
    (opt,args) = parser.parse_args()
    userInitializeModels(opt.bootstrap,opt.zero,pmmldir)
Example #28
 def __init__(self):
     super(BaseInternalRebaselineCommand, self).__init__(options=[
         self.results_directory_option,
         self.suffixes_option,
         optparse.make_option("--builder", help="Builder to pull new baselines from"),
         optparse.make_option("--test", help="Test to rebaseline"),
         ])
Example #29
def platform_options(use_globs=False):
    return [
        optparse.make_option(
            "--platform",
            action="store",
            help=(
                'Glob-style list of platform/ports to use (e.g., "mac*")'
                if use_globs
                else 'Platform to use (e.g., "mac-lion")'
            ),
        ),
        optparse.make_option(
            "--efl",
            action="store_const",
            dest="platform",
            const=("efl*" if use_globs else "efl"),
            help=("Alias for --platform=efl*" if use_globs else "Alias for --platform=efl"),
        ),
        optparse.make_option(
            "--gtk",
            action="store_const",
            dest="platform",
            const=("gtk*" if use_globs else "gtk"),
            help=("Alias for --platform=gtk*" if use_globs else "Alias for --platform=gtk"),
        ),
        optparse.make_option(
            "--qt",
            action="store_const",
            dest="platform",
            const=("qt*" if use_globs else "qt"),
            help=("Alias for --platform=qt" if use_globs else "Alias for --platform=qt"),
        ),
    ]
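
All of these aliases write a constant into the shared dest="platform", so whichever alias appears last on the command line wins. A minimal sketch, assuming use_globs=False:

import optparse

parser = optparse.OptionParser(option_list=[
    optparse.make_option("--platform", action="store"),
    optparse.make_option("--gtk", action="store_const", dest="platform", const="gtk"),
    optparse.make_option("--qt", action="store_const", dest="platform", const="qt"),
])
opts, _ = parser.parse_args(["--gtk", "--qt"])
print(opts.platform)  # 'qt' -- the later alias overwrote the earlier one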
Example #30
    def __init__(self):
        options = [
            make_option("--all", action="store_true", default=False, help="display the expectations for *all* tests"),
            make_option(
                "-x",
                "--exclude-keyword",
                action="append",
                default=[],
                help='limit to tests not matching the given keyword (for example, "skip", "slow", or "crash"). May specify multiple times.',
            ),
            make_option(
                "-i",
                "--include-keyword",
                action="append",
                default=[],
                help='limit to tests with the given keyword (for example, "skip", "slow", or "crash"). May specify multiple times.',
            ),
            make_option(
                "--csv",
                action="store_true",
                default=False,
                help="Print a CSV-style report that includes the port name, modifiers, tests, and expectations",
            ),
            make_option(
                "-f",
                "--full",
                action="store_true",
                default=False,
                help="Print a full TestExpectations-style line for every match",
            ),
        ] + port_options(platform="port/platform to use. Use glob-style wildcards for multiple ports (implies --csv)")

        AbstractDeclarativeCommand.__init__(self, options=options)
        self._expectation_models = {}
Example #31
class TaskListCommand(BaseCommand):
    """
    Run a list of predefined tasks from the command line.
    """
    requires_model_validation = False  # if True, breaks coverage of models.py files

    option_list = BaseCommand.option_list + (
        make_option(
            '--all',
            action='store_true',
            dest='test_all',
            default=False,
            help='Ignore PROJECT_APPS settings and run through all INSTALLED_APPS'),
        make_option('--interactive',
                    action='store_true',
                    dest='interactive',
                    default=False,
                    help='Allow asking for user input'),
        make_option(
            '--debug',
            action='store_true',
            dest='debug',
            default=False,
            help='Do not intercept stdout and stderr, friendly for console debuggers'),
        make_option('--output-dir',
                    dest='output_dir',
                    default="reports",
                    help='Report files directory'),
    )

    def __init__(self):
        super(TaskListCommand, self).__init__()
        self.tasks_cls = [
            import_module(module_name).Task
            for module_name in self.get_task_list()
        ]

    def handle(self, *test_labels, **options):
        # instantiate tasks
        self.tasks = [
            task_cls(test_labels, options) for task_cls in self.tasks_cls
        ]

        # subscribe
        for signal_name, signal in inspect.getmembers(signals):
            for task in self.tasks:
                signal_handler = getattr(task, signal_name, None)
                if signal_handler:
                    signal.connect(signal_handler)

        # run
        test_runner_cls = get_runner()
        test_runner = test_runner_cls(
            output_dir=options['output_dir'],
            interactive=options['interactive'],
            debug=options['debug'],
            verbosity=int(options.get('verbosity', 1)),
            with_reports=options.get('with_reports', True))

        if test_runner.run_tests(test_labels):
            sys.exit(1)

    def get_task_list(self):
        """
        Return list of task modules for command

        Subclasses should override this method
        """
        return []

    def create_parser(self, *args):
        """
        Extend the option list with tasks specific options
        """
        parser = super(TaskListCommand, self).create_parser(*args)

        for task_cls in self.tasks_cls:
            option_group = OptionGroup(parser, task_cls.__module__, "")

            if task_cls.option_list:
                for option in task_cls.option_list:
                    option_group.add_option(option)
                parser.add_option_group(option_group)

        return parser
Example #32
class Command(management_base.BaseCommand):
    args = "class_path"
    help = "Performs a reindex of the given document class."
    requires_model_validation = True
    option_list = management_base.BaseCommand.option_list + (
        optparse.make_option(
            '--background',
            action='store_true',
            dest='background',
            default=False,
            help="Should the reindexing be performed by background workers."),
        optparse.make_option(
            '--recreate-index',
            action='store_true',
            dest='recreate-index',
            default=False,
            help="Should the index be dropped and recreated. THIS WILL ERASE ALL DATA!"),
        optparse.make_option(
            '--start-pk',
            dest='start-pk',
            default="0",
            help="Start with the specified primary key instead of the first one."))

    def handle(self, *args, **options):
        """
    Performs a reindex of the given document class.
    """
        if len(args) != 1:
            raise management_base.CommandError(
                "Reindex command takes exactly one argument!")

        # Load the specified document class
        class_path = args[0]
        module_name = class_path[:class_path.rfind(".")]
        class_name = class_path[class_path.rfind(".") + 1:]
        module = importlib.import_module(module_name)
        document_class = getattr(module, class_name)

        if not issubclass(document_class, itsy_document.Document):
            raise management_base.CommandError(
                "Specified class is not a valid Document!")

        if not document_class._meta.searchable or document_class._meta.abstract or document_class._meta.embedded:
            raise management_base.CommandError(
                "Specified document is not searchable!")

        if options.get("recreate-index"):
            # Drop the index and recreate it
            self.stdout.write("Recreating index...\n")
            document_class._meta.search_engine.drop()
            document_class._meta.emit_search_mappings()

        if options.get("background"):
            # Spawn the reindex task
            itsy_tasks.search_index_reindex.delay(document_class)

            # Notify the user that the reindex has started in the background
            self.stdout.write(
                "Reindex of %s has been initiated in the background.\n" %
                class_path)
        else:
            self.stdout.write("Performing foreground reindex of %s...\n" %
                              class_path)

            # Modify configuration for bulk indexing (disable index refresh)
            document_class._meta.search_engine.set_configuration(
                {"index": {
                    "refresh_interval": "-1"
                }})

            # Setup the primary key offset
            last_pk = int(options.get("start-pk", "0"))

            try:
                num_indexed = 0
                batch_size = 10000
                while True:
                    # Assume that primary keys are monotonically incrementing
                    self.stdout.write("Starting batch %d at pk=%s.\n" %
                                      (num_indexed // batch_size + 1, last_pk))
                    old_last_pk = last_pk
                    for document in document_class.find(
                            pk__gt=last_pk).order_by("pk").limit(batch_size):
                        try:
                            document.save(
                                target=itsy_document.DocumentSource.Search)
                        except KeyboardInterrupt:
                            self.stdout.write("ERROR: Aborted by user.\n")
                            raise
                        except Exception:
                            # Print the exception and continue reindexing
                            traceback.print_exc()

                        last_pk = document.pk
                        num_indexed += 1
                        if num_indexed % 100 == 0:
                            self.stdout.write("Indexed %d documents.\n" %
                                              num_indexed)

                    if old_last_pk == last_pk:
                        self.stdout.write("Index finished at pk=%s.\n" %
                                          last_pk)
                        break
            except KeyboardInterrupt:
                self.stdout.write("Index aborted at pk=%s.\n" % last_pk)
            finally:
                # Restore index configuration after indexing
                document_class._meta.search_engine.set_configuration(
                    {"index": {
                        "refresh_interval": "1s"
                    }})

                # Perform index optimization
                self.stdout.write("Optimizing index...\n")
                document_class._meta.search_engine.optimize(max_num_segments=1)

            self.stdout.write("Reindex done.\n")
Example #33
class Command(BaseCommand):
    args = "<document_path document_path ...>"
    help = "Render a wiki document"
    option_list = BaseCommand.option_list + (
        make_option('--all', dest="all", default=False,
                    action="store_true",
                    help="Render ALL documents"),
        make_option('--min-age', dest="min_age", default=600, type="int",
                    help="Documents rendered less than this many seconds ago "
                         "will be skipped"),
        make_option('--baseurl', dest="baseurl",
                    default=False,
                    help="Base URL to site"),
        make_option('--force', action="store_true", dest="force",
                    default=False,
                    help="Force rendering, first clearing record of any "
                         "rendering in progress"),
        make_option('--nocache', action="store_true", dest="nocache",
                    default=False,
                    help="Use Cache-Control: no-cache instead of max-age=0"),
        make_option('--defer', action="store_true", dest="defer",
                    default=False,
                    help="Defer rendering"),
    )

    def handle(self, *args, **options):
        self.options = options

        self.base_url = options['baseurl']
        if not self.base_url:
            from django.contrib.sites.models import Site
            site = Site.objects.get_current()
            self.base_url = 'http://%s' % site.domain

        if options['all']:
            logging.info(u"Querying ALL %s documents..." %
                         Document.objects.count())
            docs = Document.objects.order_by('-modified').iterator()
            for doc in docs:
                self.do_render(doc)

        else:
            if not len(args) == 1:
                raise CommandError("Need at least one document path to render")
            for path in args:
                # Accept a single page path from command line, but be liberal in
                # what we accept, eg: /en-US/docs/CSS (full path); /en-US/CSS (no
                # /docs); or even en-US/CSS (no leading slash)
                if path.startswith('/'):
                    path = path[1:]
                locale, sep, slug = path.partition('/')
                head, sep, tail = slug.partition('/')
                if head == 'docs':
                    slug = tail
                self.do_render(Document.objects.get(locale=locale, slug=slug))

    def do_render(self, doc):
        # Skip very recently rendered documents. This should help make it
        # easier to start and stop an --all command without needing to start
        # from the top of the list every time.
        if doc.last_rendered_at:
            now = datetime.datetime.now()
            render_age = now - doc.last_rendered_at
            min_age = datetime.timedelta(seconds=self.options['min_age'])
            if (render_age < min_age):
                logging.debug(u"Skipping %s (%s) - rendered %s sec ago" %
                              (doc, doc.get_absolute_url(), render_age))
                return

        if self.options['force']:
            doc.render_started_at = None

        if self.options['nocache']:
            cc = 'no-cache'
        else:
            cc = 'max-age=0'

        if self.options['defer']:
            logging.info(u"Queuing deferred render for %s (%s)" %
                          (doc, doc.get_absolute_url()))
            render_document.delay(doc, cc, self.base_url)
            logging.debug(u"Queued.")

        else:
            logging.info(u"Rendering %s (%s)" %
                         (doc, doc.get_absolute_url()))
            try:
                render_document(doc, cc, self.base_url)
                logging.debug(u"DONE.")
            except DocumentRenderingInProgress:
                logging.error(u"Rendering is already in progress for this document") 
Example #34
class Command(BaseCommand):
    """Transfer json objects from production database to development objectstore database."""

    help = "Transfer json objects from production database to development objectstore database."

    option_list = BaseCommand.option_list + (
        make_option('--host',
                    action='store',
                    dest='host',
                    default='datazilla.mozilla.org',
                    help='Production host to transfer data from'),
        make_option(
            "--dev_project",
            action="store",
            dest="dev_project",
            default=None,
            help=("The project name in development to transfer data to.")),
        make_option(
            "--prod_project",
            action="store",
            dest="prod_project",
            default='talos',
            help=("The project name in production to transfer data from.")),
        make_option("--days_ago",
                    action="store",
                    dest="days_ago",
                    default="7",
                    help=("Number of days to retrieve data for")),
        make_option("--branch",
                    action="store",
                    dest="branch",
                    default="Mozilla-Inbound",
                    help=("The branch name to transfer data from.")),
        make_option("--logfile",
                    action="store",
                    dest="logfile",
                    default="{0}/transfer_production_data.log".format(
                        os.getcwd()),
                    help=("Log of revisions run.")),
    )

    def handle(self, *args, **options):
        """ Transfer data to a development project based on the args value. """

        host = options.get("host")
        dev_project = options.get("dev_project")
        prod_project = options.get("prod_project")
        branch = options.get("branch")
        days_ago = options.get("days_ago")
        logfile = options.get("logfile")

        if not host:
            self.println("You must supply a host name to retrieve data from " +
                         "--host hostname")
            return

        if not dev_project:
            self.println("You must supply a dev_project name to load data in.")
            return

        if not branch:
            self.println("You must supply a branch name to retrieve data for.")
            return

        #Set timeout so we don't hang
        timeout = 120
        socket.setdefaulttimeout(timeout)

        revisions_uri = 'refdata/pushlog/list'
        params = 'days_ago={0}&branches={1}'.format(days_ago, branch)
        url = "https://{0}/{1}?{2}".format(host, revisions_uri, params)

        json_data = ""

        #Retrieve revisions to iterate over
        try:
            response = urllib.urlopen(url)
            json_data = response.read()
        except socket.timeout:
            self.stdout.write("URL: {0}\nTimedout {1} seconds\n".format(
                url, timeout))
            sys.exit(0)

        data = json.loads(json_data)
        all_keys = data.keys()
        all_keys.sort()

        ##Keep track of revisions already loaded##
        file_obj = open(logfile, 'a+')  # 'w+' would truncate the log and lose the seen set
        file_obj.seek(0)
        revisions_seen = set()
        for line in file_obj.readlines():
            revisions_seen.add(line.strip())

        revisions = []

        for key in all_keys:
            for revision in data[key]['revisions']:
                if revision not in revisions_seen:
                    revisions.append(revision)

        dm = PerformanceTestModel(dev_project)

        for revision in revisions:

            rawdata_uri = '{0}/testdata/raw/{1}/{2}/'.format(
                prod_project, branch, revision)
            rawdata_url = "https://{0}/{1}".format(host, rawdata_uri)

            raw_json_data = ""

            try:
                rawdata_response = urllib.urlopen(rawdata_url)
                raw_json_data = rawdata_response.read()
            except socket.timeout:
                self.stdout.write("URL: {0}\nTimedout {1} seconds\n".format(
                    rawdata_url, timeout))
                sys.exit(0)

            test_object_data = json.loads(raw_json_data)

            for test_object in test_object_data:
                id = dm.store_test_data(json.dumps(test_object), "")
                self.stdout.write("Revision:{0} Id:{1}\n".format(
                    revision, str(id)))

            #Record the revision as loaded
            file_obj.write(revision + "\n")

        file_obj.close()
        dm.disconnect()
Example #35
File: state.py Project: viggeh/fabric
# 'dest' value becomes the environment key and the value, the env value).
#
# Keep in mind that optparse changes hyphens to underscores when automatically
# deriving the `dest` name, e.g. `--reject-unknown-hosts` becomes
# `reject_unknown_hosts`.
#
# Furthermore, *always* specify some sort of default to avoid ending up with
# optparse.NO_DEFAULT (currently a two-tuple)! In general, None is a better
# default than ''.
#
# User-facing documentation for these are kept in docs/env.rst.
env_options = [

    make_option('-a', '--no_agent',
        action='store_true',
        default=False,
        help="don't use the running SSH agent"
    ),

    make_option('-A', '--forward-agent',
        action='store_true',
        default=False,
        help="forward local agent to remote end"
    ),

    make_option('--abort-on-prompts',
        action='store_true',
        default=False,
        help="abort instead of prompting (for password, host, etc)"
    ),

    # (the remaining options in the original file are not shown in this excerpt)
]
Example #36
class Command(LabelCommand):
    option_list = LabelCommand.option_list + (make_option(
        "-o",
        "--owner",
        help="add packages as OWNER",
        metavar="OWNER",
        default=None), )
    help = """Add one or more packages to the repository. Each argument can
be a package name or a URL to an archive or egg. Package names honour
the same rules as easy_install with regard to indicating versions etc.

If a version of the package exists, but is older than what we want to install,
the owner remains the same.

For new packages there needs to be an owner. If the --owner option is present
we use that value. If not, we try to match the maintainer of the package, form
the metadata, with a user in out database, based on the If it's a new package
and the maintainer emailmatches someone in our user list, we use that. If not,
the package can not be
added"""

    def __init__(self, *args, **kwargs):
        self.pypi = PackageIndex()
        LabelCommand.__init__(self, *args, **kwargs)

    def handle_label(self, label, **options):
        with tempdir() as tmp:
            path = self.pypi.download(label, tmp)
            if path:
                self._save_package(path, options["owner"])
            else:
                print "Could not add %s. Not found." % label

    def _save_package(self, path, ownerid):
        meta = self._get_meta(path)

        try:
            # can't use get_or_create as that demands there be an owner
            package = Package.objects.get(name=meta.name)
            isnewpackage = False
        except Package.DoesNotExist:
            package = Package(name=meta.name)
            isnewpackage = True

        release = package.get_release(meta.version)
        if not isnewpackage and release and release.version == meta.version:
            print "%s-%s already added" % (meta.name, meta.version)
            return

        # algorithm as follows: If owner is given, try to grab user with that
        # username from db. If doesn't exist, bail. If no owner set look at
        # mail address from metadata and try to get that user. If it exists
        # use it. If not, bail.
        owner = None

        if ownerid:
            try:
                if "@" in ownerid:
                    owner = User.objects.get(email=ownerid)
                else:
                    owner = User.objects.get(username=ownerid)
            except User.DoesNotExist:
                pass
        else:
            try:
                owner = User.objects.get(email=meta.author_email)
            except User.DoesNotExist:
                pass

        if not owner:
            print "No owner defined. Use --owner to force one"
            return

        # at this point we have metadata and an owner, can safely add it.

        package.owner = owner
        # Some packages don't have a proper license; seems to be a problem
        # with setup.py upload. Use "Unknown" in that case.
        package.license = meta.license or "Unknown"
        package.metadata_version = meta.metadata_version
        package.author = meta.author
        package.home_page = meta.home_page
        package.download_url = meta.download_url
        package.summary = meta.summary
        package.description = meta.description
        package.author_email = meta.author_email

        package.save()

        for classifier in meta.classifiers:
            package.classifiers.add(
                Classifier.objects.get_or_create(name=classifier)[0])

        release = Release()
        release.version = meta.version
        release.package = package
        filename = os.path.basename(path)

        file = File(open(path, "rb"))
        release.distribution.save(filename, file)
        release.save()
        print "%s-%s added" % (meta.name, meta.version)

    def _get_meta(self, path):
        data = pkginfo.get_metadata(path)
        if data:
            return data
        else:
            print "Couldn't get metadata from %s. Not added to chishop" % os.path.basename(
                path)
            return None
Example #37
class Command(BaseCommand):
    option_list = BaseCommand.option_list + (
        make_option(
            "--unsorted",
            "-u",
            action="store_true",
            dest="unsorted",
            help="Show urls unsorted but same order as found in url patterns"),
        make_option("--language",
                    "-l",
                    dest="language",
                    help="Set the language code (useful for i18n_patterns)"),
        make_option("--decorator",
                    "-d",
                    dest="decorator",
                    help="Show the presence of given decorator on views"),
        make_option("--format",
                    "-f",
                    dest="format_style",
                    default="dense",
                    help="Style of the output. Choices: %s" % FMTR.keys()))

    help = "Displays all of the url matching routes for the project."

    requires_model_validation = True

    def handle(self, *args, **options):
        if args:
            appname, = args

        style = color_style()

        if settings.ADMIN_FOR:
            settings_modules = [
                __import__(m, {}, {}, ['']) for m in settings.ADMIN_FOR
            ]
        else:
            settings_modules = [settings]

        language = options.get('language', None)
        if language is not None:
            activate(language)

        decorator = options.get('decorator')
        if decorator is None:
            decorator = 'login_required'

        format_style = options.get('format_style')
        if format_style not in FMTR:
            raise CommandError(
                "Format style '%s' does not exist. Options: %s" %
                (format_style, FMTR.keys()))
        fmtr = FMTR[format_style]

        views = []
        for settings_mod in settings_modules:
            try:
                urlconf = __import__(settings_mod.ROOT_URLCONF, {}, {}, [''])
            except Exception as e:
                if options.get('traceback', None):
                    import traceback
                    traceback.print_exc()
                print(
                    style.ERROR("Error occurred while trying to load %s: %s" %
                                (settings_mod.ROOT_URLCONF, str(e))))
                continue

            view_functions = extract_views_from_urlpatterns(
                urlconf.urlpatterns)
            for (func, regex, url_name) in view_functions:

                if hasattr(func, '__globals__'):
                    func_globals = func.__globals__
                elif hasattr(func, 'func_globals'):
                    func_globals = func.func_globals
                else:
                    func_globals = {}

                decorators = [decorator] if decorator in func_globals else []

                if isinstance(func, functools.partial):
                    func = func.func
                    decorators.insert(0, 'functools.partial')

                if hasattr(func, '__name__'):
                    func_name = func.__name__
                elif hasattr(func, '__class__'):
                    func_name = '%s()' % func.__class__.__name__
                else:
                    func_name = re.sub(r' at 0x[0-9a-f]+', '', repr(func))

                views.append(
                    fmtr.format(
                        module='{0}.{1}'.format(style.MODULE(func.__module__),
                                                style.MODULE_NAME(func_name)),
                        url_name=style.URL_NAME(url_name or ''),
                        url=style.URL(simplify_regex(regex)),
                        decorator=', '.join(decorators),
                    ))

        if not options.get('unsorted', False):
            views = sorted(views)

        if format_style == 'aligned':
            views = [row.split(',') for row in views]
            widths = [len(max(columns, key=len)) for columns in zip(*views)]
            views = [
                '   '.join('{0:<{1}}'.format(cdata, width)
                           for width, cdata in zip(widths, row))
                for row in views
            ]
        elif format_style == 'table':
            # Reformat all data and show in a table format

            views = [row.split(',') for row in views]
            widths = [len(max(columns, key=len)) for columns in zip(*views)]
            table_views = []

            header = (style.MODULE_NAME('URL'), style.MODULE_NAME('Module'),
                      style.MODULE_NAME('Name'),
                      style.MODULE_NAME('Decorator'))
            table_views.append(' | '.join(
                '{0:<{1}}'.format(title, width)
                for width, title in zip(widths, header)))
            table_views.append('-+-'.join('-' * width for width in widths))

            for row in views:
                table_views.append(' | '.join(
                    '{0:<{1}}'.format(cdata, width)
                    for width, cdata in zip(widths, row)))

            # Replace original views so we can return the same object
            views = table_views

        return "\n".join([v for v in views]) + "\n"
Example #38
class WebKitPatch(Host):
    # FIXME: It might make more sense if this class had a Host attribute
    # instead of being a Host subclass.

    global_options = [
        optparse.make_option('-v',
                             '--verbose',
                             action='store_true',
                             dest='verbose',
                             default=False,
                             help='enable all logging'),
        optparse.make_option('-d',
                             '--directory',
                             action='append',
                             default=[],
                             help='Directory to look at for changed files'),
    ]

    def __init__(self, path):
        super(WebKitPatch, self).__init__()
        self._path = path
        self.commands = [
            AnalyzeBaselines(),
            CopyExistingBaselines(),
            CrashLog(),
            FlakyTests(),
            OptimizeBaselines(),
            PrettyDiff(),
            PrintBaselines(),
            PrintExpectations(),
            Rebaseline(),
            RebaselineCL(),
            RebaselineExpectations(),
            RebaselineTest(),
        ]
        self.help_command = HelpCommand(tool=self)
        self.commands.append(self.help_command)

    def main(self, argv=None):
        argv = argv or sys.argv
        (command_name, args) = self._split_command_name_from_args(argv[1:])

        option_parser = self._create_option_parser()
        self._add_global_options(option_parser)

        command = self.command_by_name(command_name) or self.help_command
        if not command:
            option_parser.error('%s is not a recognized command' % command_name)

        command.set_option_parser(option_parser)
        (options, args) = command.parse_args(args)

        result = command.check_arguments_and_execute(options, args, self)
        return result

    def path(self):
        return self._path

    @staticmethod
    def _split_command_name_from_args(args):
        # Assume the first argument which doesn't start with "-" is the command name.
        command_index = 0
        for arg in args:
            if arg[0] != '-':
                break
            command_index += 1
        else:
            return (None, args[:])

        command = args[command_index]
        return (command, args[:command_index] + args[command_index + 1:])
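
    # Illustrative aside (not part of the original source): with
    # args == ['-v', 'rebaseline', '--all'], the loop above breaks at
    # index 1 and returns ('rebaseline', ['-v', '--all']); if every
    # argument starts with '-', the for/else branch returns (None, args[:]).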

    def _create_option_parser(self):
        usage = 'Usage: %prog [options] COMMAND [ARGS]'
        name = optparse.OptionParser().get_prog_name()
        return HelpPrintingOptionParser(
            epilog_method=self.help_command.help_epilog,
            prog=name,
            usage=usage)

    def _add_global_options(self, option_parser):
        global_options = self.global_options or []
        for option in global_options:
            option_parser.add_option(option)

    def name(self):
        return optparse.OptionParser().get_prog_name()

    def should_show_in_main_help(self, command):
        return command.show_in_main_help

    def command_by_name(self, command_name):
        for command in self.commands:
            if command_name == command.name:
                return command
        return None
Example #39
0
class Command(BaseCommand):
    args = ''
    help = "Run the background collector to fetch metrics from /jmx on each server."

    option_list = BaseCommand.option_list + (make_option(
        "--use_threadpool",
        action="store_true",
        default=False,
        help="Use thread pool to store metrics to database if the flag is on."
    ), )

    def handle(self, *args, **options):
        gc.set_debug(gc.DEBUG_STATS)

        self.args = args
        self.options = options

        self.stdout.write("args: %r\n" % (args, ))
        self.stdout.write("options: %r\n" % options)

        self.collector_config = CollectorConfig(self.args, self.options)
        self.update_active_tasks()
        self.region_operation_aggregator = RegionOperationMetricAggregator(
            self.collector_config)
        # we start to aggregate region operation metric after one period
        reactor.callLater(
            self.collector_config.period + 1,
            self.region_operation_aggregator.aggregate_region_operation_metric)
        self.fetch_metrics()

    def update_active_tasks(self):
        # Mark all current tasks as inactive.
        Service.objects.all().update(active=False)
        Cluster.objects.all().update(active=False)
        Job.objects.all().update(active=False)
        Task.objects.all().update(active=False)

        # Add all active tasks
        self.metric_sources = []
        for service_name, service in self.collector_config.services.iteritems():
            # Save to database.
            # The active field has the default value True.
            service_record, created = Service.objects.get_or_create(
                name=service_name, defaults={"metric_url": service.metric_url})
            if not created:
                # Mark it as active if it exists.
                service_record.active = True
                service_record.save()

            for cluster_name, cluster in service.clusters.iteritems():
                cluster_record, created = Cluster.objects.get_or_create(
                    service=service_record, name=cluster_name)
                if not created:
                    cluster_record.active = True
                    cluster_record.save()

                for job_name in service.jobs:
                    job_record, created = Job.objects.get_or_create(
                        cluster=cluster_record, name=job_name)
                    if not created:
                        job_record.active = True
                        job_record.save()

                    job = cluster.jobs[job_name]
                    # We assume http port is always base_port + 1
                    port = job.base_port + 1
                    for task_id, host in job.hostnames.iteritems():
                        task_record, created = Task.objects.get_or_create(
                            job=job_record,
                            task_id=task_id,
                            defaults={
                                "host": host,
                                "port": port
                            })
                        if not created or task_record.host != host or task_record.port != port:
                            task_record.active = True
                            task_record.host = host
                            task_record.port = port
                            task_record.save()
                        self.metric_sources.append(
                            MetricSource(self.collector_config, task_record))

    def fetch_metrics(self):
        for metric_source in self.metric_sources:
            # Randomize the start time of each metric source. StatusUpdater
            # refreshes cluster status every 'self.collector_config.period'
            # seconds, so we draw from [0, period - 2) to give each task at
            # least 2 seconds to fetch its page and write its status to the
            # database before StatusUpdater aggregates it.
            wait_time = random.uniform(0, self.collector_config.period - 2)
            logger.info("%r waiting %f seconds for %s...", metric_source.task,
                        wait_time, metric_source.url)
            reactor.callLater(wait_time, metric_source.fetch_metrics)

        status_updater = StatusUpdater(self.collector_config)
        reactor.callLater(self.collector_config.period + 1,
                          status_updater.update_status)

        reactor.run()
            print("Contact: ", measure["Contact"])

        if "Interval" in measure:
            for i in measure["Interval"]:
                print("Interval: ", i)


if __name__ == "__main__":
    dbus.mainloop.glib.DBusGMainLoop(set_as_default=True)

    bus = dbus.SystemBus()

    option_list = [
        make_option("-i",
                    "--adapter",
                    action="store",
                    type="string",
                    dest="adapter"),
        make_option("-b",
                    "--device",
                    action="store",
                    type="string",
                    dest="address"),
    ]

    parser = OptionParser(option_list=option_list)

    (options, args) = parser.parse_args()

    if not options.address:
        print("Usage: %s [-i <adapter>] -b <bdaddr> [cmd]" % (sys.argv[0]))
Example #41
0
class Command(BaseCommand):
    help = 'Import everything from a MediaWiki'
    args = 'ApiUrl Username [Password]'

    articles_worked_on = []
    articles_imported = []
    matching_old_link_new_link = {}

    option_list = BaseCommand.option_list + (
        make_option(
            '--user-matching',
            action='append',
            dest='user_matching',
            default=[],
            help='List of <username>:django_user_pk to do the matching'),
        make_option('--replace_existing',
                    action='store_true',
                    dest='replace_existing',
                    default=False,
                    help='Replace existing pages'),
    )

    def get_params(self, args):
        """Return the list of params"""
        try:
            api_url = args[0]
        except IndexError:
            raise CommandError(
                'You need to provide the url to the MediaWiki API')

        try:
            api_username = args[1]
        except IndexError:
            raise CommandError('You need to provide an username')

        try:
            api_password = args[2]
        except IndexError:
            api_password = getpass.getpass('Please enter the API password: ')
            if not api_password:
                raise CommandError('You need to provide a password')

        return (api_url, api_username, api_password)

    def get_all_pages(self, api, site):
        """Return all pages on the wiki"""

        from wikitools.pagelist import listFromQuery

        result = api.APIRequest(site, {
            'action': 'query',
            'generator': 'allpages'
        }).query()

        return listFromQuery(site, result['query']['pages'])

    def import_page(self, api, site, page, current_site, url_root,
                    user_matching, replace_existing):

        import pypandoc

        # Filter titles to avoid strange characters.
        title = only_printable(page.title)
        urltitle = slugify(only_printable(urllib.unquote(page.urltitle))[:50])

        added = 1

        while urltitle in self.articles_worked_on:
            title = only_printable(page.title) + " " + str(added)
            urltitle = only_printable(
                slugify((urllib.unquote(page.urltitle))[:47] + " " +
                        str(added)))
            added += 1
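
        # Illustrative (not part of the original source): if two pages both
        # slugify to "home", the second gets title "<title> 1" and a slug
        # like "home-1", and so on until the slug is unique.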

        self.articles_worked_on.append(urltitle)

        print("Working on %s (%s)" % (title, urltitle))

        # Check if the URL path already exists
        try:
            urlp = URLPath.objects.get(slug=urltitle)

            self.matching_old_link_new_link[
                page.title] = urlp.article.get_absolute_url()

            if not replace_existing:
                print("\tAlready existing, skipping...")
                return

            print("\tDestorying old version of the article")
            urlp.article.delete()

        except URLPath.DoesNotExist:
            pass

        # Create article
        article = Article()

        for history_page in page.getHistory()[-2:][::-1]:

            try:
                if history_page['user'] in user_matching:
                    user = get_user_model().objects.get(
                        pk=user_matching[history_page['user']])
                else:
                    user = get_user_model().objects.get(
                        username=history_page['user'])
            except get_user_model().DoesNotExist:
                print(
                    "\tCannot found user with username=%s. Use --user-matching \"%s:<user_pk>\" to manualy set it"
                    % (
                        history_page['user'],
                        history_page['user'],
                    ))
                user = None

            article_revision = ArticleRevision()
            article_revision.content = pypandoc.convert(
                history_page['*'], 'md', 'mediawiki')
            article_revision.title = title
            article_revision.user = user
            article_revision.owner = user

            article.add_revision(article_revision, save=True)

            article_revision.created = history_page['timestamp']
            article_revision.save()

        # Update the latest content WITH expanded templates
        # TODO: do that for history as well?
        article_revision.content = pypandoc.convert(
            striptags(page.getWikiText(True, True).decode('utf-8')).replace(
                '__NOEDITSECTION__', '').replace('__NOTOC__', ''), 'md',
            'mediawiki')
        article_revision.save()

        article.save()

        upath = URLPath.objects.create(site=current_site,
                                       parent=url_root,
                                       slug=urltitle,
                                       article=article)
        article.add_object_relation(upath)

        self.matching_old_link_new_link[
            page.title] = upath.article.get_absolute_url()

        self.articles_imported.append((article, article_revision))

    def update_links(self):
        """Update link in imported articles"""

        # TODO: nsquare is bad
        for (article, article_revision) in self.articles_imported:
            print("Updating links of %s" % (article_revision.title, ))
            for id_from, id_to in six.iteritems(
                    self.matching_old_link_new_link):
                print("Replacing (%s \"wikilink\") with (%s)" %
                      (id_from, id_to))
                article_revision.content = article_revision.content.replace(
                    "(%s \"wikilink\")" % (id_from, ), "(%s)" % (id_to, ))

            article_revision.save()

    def handle(self, *args, **options):

        try:
            import wikitools
        except ImportError:
            raise CommandError(
                'You need to install wikitools to use this command !')

        try:
            import pypandoc
        except ImportError:
            raise CommandError('You need to install pypandoc')

        user_matching = {}

        for um in options['user_matching']:
            # Split on the last ':' so usernames containing ':' still work.
            username, pk = um.rsplit(':', 1)

            user_matching[username] = pk

        api_url, api_username, api_password = self.get_params(args)

        site = wikitools.wiki.Wiki(api_url)
        site.login(api_username, api_password)

        pages = self.get_all_pages(wikitools.api, site)

        current_site = Site.objects.get_current()
        url_root = URLPath.root()

        for page in pages:
            self.import_page(wikitools.api, site, page, current_site, url_root,
                             user_matching, options['replace_existing'])

        self.update_links()
Example #42
0
File: sisinfo.py Project: Symbian9/sisinfo
class ContentPrinter :
	def __init__(self) :
		pass
		
	def handleField(self, field, depth) :
		buf = ""
		for i in range(depth) :
			buf += "  "
		buf += sisfields.FieldNames[field.type] + " "
		if len(field.readableStr()) > 0 :
			buf += field.readableStr()
		print buf

OptionList = [
	optparse.make_option("-f", "--file", help="Name of the SIS file to inspect", metavar="FILENAME"),
	optparse.make_option("-i", "--info", help="Print information about SIS contents", action="store_true", default=False),
	optparse.make_option("-s", "--structure", help="Print SIS file structure", action="store_true", default=False),
	optparse.make_option("-e", "--extract", help="Extract the files from the SIS file to PATH", metavar="PATH"),
	optparse.make_option("-c", "--certificate", help="Print certificate information", action="store_true", default=False),
	]
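
# Hedged usage sketch (not part of the original source; flags as declared
# in OptionList above, script name assumed from the file header):
#
#   python sisinfo.py -f package.sis -i -s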
	
def validateArguments(options, args) :
	result = True
	if not options.file :
		result = False
		raise Exception("Filename must be defined")
	if not (options.structure or options.extract or options.info or options.certificate) :
		result = False
		raise Exception("At least one of the switches: -s, -e, -i, or -c must be defined")
	if options.certificate and not PyASN1Availabe :
Example #43
0
class Command(BaseDbBackupCommand):
    help = """
    Restore a backup from storage, encrypted and/or compressed.
    """
    option_list = BaseDbBackupCommand.option_list + (
        make_option("-d", "--database", help="Database to restore"),
        make_option(
            "-i", "--input-filename", help="Specify filename to backup from"),
        make_option("-I",
                    "--input-path",
                    help="Specify path on local filesystem to backup from"),
        make_option(
            "-s", "--servername", help="Use a different servername backup"),
        make_option("-l",
                    "--list",
                    action='store_true',
                    default=False,
                    help="List backups in the backup directory"),
        make_option("-c",
                    "--decrypt",
                    help="Decrypt data before restoring",
                    default=False,
                    action='store_true'),
        make_option("-p",
                    "--passphrase",
                    help="Passphrase for decrypt file",
                    default=None),
        make_option("-z",
                    "--uncompress",
                    help="Uncompress gzip data before restoring",
                    action='store_true'),
    )

    def handle(self, *args, **options):
        """Django command handler."""
        self.verbosity = int(options.get('verbosity'))
        self.quiet = options.get('quiet')
        self.database = options.get('database')
        database_key = self.database if self.database else 'default'
        self.connector = get_connector(database_key)
        try:
            connection.close()
            self.filename = options.get('input_filename')
            self.path = options.get('input_path')
            self.servername = options.get('servername')
            self.decrypt = options.get('decrypt')
            self.uncompress = options.get('uncompress')
            self.passphrase = options.get('passphrase')
            self.interactive = options.get('interactive')
            self.database = self._get_database(options)
            self.storage = BaseStorage.storage_factory()
            if options.get('list'):
                return self._list_backups()
            self._restore_backup()
        except StorageError as err:
            raise CommandError(err)

    def _get_database(self, options):
        """Get the database to restore."""
        database_key = options.get('database')
        if not database_key:
            if len(settings.DATABASES) >= 2:
                errmsg = "Because this project contains more than one database, you"\
                    " must specify the --database option."
                raise CommandError(errmsg)
            database_key = list(settings.DATABASES.keys())[0]
        return settings.DATABASES[database_key]

    def _restore_backup(self):
        """Restore the specified database."""
        self.logger.info("Restoring backup for database: %s",
                         self.database['NAME'])
        if self.path:
            input_filename = self.path
            input_file = self.read_local_file(self.path)
        else:
            if self.filename:
                input_filename = self.filename
            # Fetch the latest backup if filepath not specified
            else:
                self.logger.info("Finding latest backup")
                try:
                    input_filename = self.storage.get_latest_backup(
                        encrypted=self.decrypt, compressed=self.uncompress)
                except StorageError as err:
                    raise CommandError(err.args[0])
            input_file = self.storage.read_file(input_filename)

        self.logger.info("Restoring: %s" % input_filename)
        if self.decrypt:
            unencrypted_file, input_filename = utils.unencrypt_file(
                input_file, input_filename, self.passphrase)
            input_file.close()
            input_file = unencrypted_file
        if self.uncompress:
            uncompressed_file, input_filename = utils.uncompress_file(
                input_file, input_filename)
            input_file.close()
            input_file = uncompressed_file
        self.logger.info("Restore tempfile created: %s",
                         utils.handle_size(input_file))
        if self.interactive:
            answer = input("Are you sure you want to continue? [Y/n]")
            if answer.lower().startswith('n'):
                self.logger.info("Quitting")
                sys.exit(0)
        input_file.seek(0)
        self.connector.restore_dump(input_file)

    # TODO: Remove this
    def _list_backups(self):
        """List backups in the backup directory."""
        msg = "'dbbrestore --list' is deprecated, use 'listbackup'."
        warnings.warn(msg, DeprecationWarning)
        self.logger.info("Listing backups on %s in /%s:", self.storage.name,
                         self.storage.backup_dir)
        for filepath in self.storage.list_directory():
            self.logger.info("  %s", os.path.basename(filepath))
            # TODO: Implement filename_details method
            # print(utils.filename_details(filepath))

    def read_local_file(self, path):
        """Open file on local filesystem."""
        return open(path, 'rb')
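
# Hedged usage sketch (not from the original source; option names as
# declared in option_list above, command name assumed to be "dbrestore"):
#
#   python manage.py dbrestore --uncompress --decrypt --passphrase secret
#
# With no --input-filename/--input-path, the latest matching backup in
# storage is selected via storage.get_latest_backup().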
Example #44
0
class Command(BaseCommand):
    help = 'Installs the named fixture(s) in the database.'
    args = "fixture [fixture ...]"

    option_list = BaseCommand.option_list + (
        make_option('--database',
                    action='store',
                    dest='database',
                    default=DEFAULT_DB_ALIAS,
                    help='Nominates a specific database to load '
                    'fixtures into. Defaults to the "default" database.'),
        make_option(
            '-e',
            '--exclude',
            dest='exclude',
            action='append',
            default=[],
            help=
            'App to exclude (use multiple --exclude to exclude multiple apps).'
        ),
    )

    def handle(self, *fixture_labels, **options):
        using = options.get('database', DEFAULT_DB_ALIAS)
        excluded_apps = options.get('exclude', [])

        connection = connections[using]
        self.style = no_style()

        verbosity = int(options.get('verbosity', 1))
        show_traceback = options.get('traceback', False)

        # commit is a stealth option - it isn't really useful as
        # a command line option, but it can be useful when invoking
        # loaddata from within another script.
        # If commit=True, loaddata will use its own transaction;
        # if commit=False, the data load SQL will become part of
        # the transaction in place when loaddata was invoked.
        commit = options.get('commit', True)

        # Keep a count of the installed objects and fixtures
        fixture_count = 0
        object_count = 0
        models = set()

        humanize = lambda dirname: "'%s'" % dirname if dirname else 'absolute path'

        # Get a cursor (even though we don't need one yet). This has
        # the side effect of initializing the test database (if
        # it isn't already initialized).
        cursor = connection.cursor()

        # Start transaction management. All fixtures are installed in a
        # single transaction to ensure that all references are resolved.
        if commit:
            transaction.commit_unless_managed(using=using)
            transaction.enter_transaction_management(using=using)
            transaction.managed(True, using=using)

        class SingleZipReader(zipfile.ZipFile):
            def __init__(self, *args, **kwargs):
                zipfile.ZipFile.__init__(self, *args, **kwargs)
                if settings.DEBUG:
                    assert len(
                        self.namelist()
                    ) == 1, "Zip-compressed fixtures must contain only one file."

            def read(self):
                return zipfile.ZipFile.read(self, self.namelist()[0])

        compression_types = {
            None: file,
            'gz': gzip.GzipFile,
            'zip': SingleZipReader
        }
        if has_bz2:
            compression_types['bz2'] = bz2.BZ2File

        app_module_paths = []
        for app in get_apps():
            if hasattr(app, '__path__'):
                # It's a 'models/' subpackage
                for path in app.__path__:
                    app_module_paths.append(path)
            else:
                # It's a models.py module
                app_module_paths.append(app.__file__)

        app_fixtures = [
            os.path.join(os.path.dirname(path), 'fixtures')
            for path in app_module_paths
        ]
        for fixture_label in fixture_labels:
            parts = fixture_label.split('.')

            if len(parts) > 1 and parts[-1] in compression_types:
                compression_formats = [parts[-1]]
                parts = parts[:-1]
            else:
                compression_formats = compression_types.keys()

            if len(parts) == 1:
                fixture_name = parts[0]
                formats = serializers.get_public_serializer_formats()
            else:
                fixture_name, format = '.'.join(parts[:-1]), parts[-1]
                if format in serializers.get_public_serializer_formats():
                    formats = [format]
                else:
                    formats = []

            if formats:
                if verbosity > 1:
                    print "Loading '%s' fixtures..." % fixture_name
            else:
                sys.stderr.write(
                    self.style.ERROR(
                        "Problem installing fixture '%s': %s is not a known serialization format."
                        % (fixture_name, format)))
                transaction.rollback(using=using)
                transaction.leave_transaction_management(using=using)
                return

            if os.path.isabs(fixture_name):
                fixture_dirs = [fixture_name]
            else:
                fixture_dirs = app_fixtures + list(
                    settings.FIXTURE_DIRS) + ['']

            for fixture_dir in fixture_dirs:
                if verbosity > 1:
                    print "Checking %s for fixtures..." % humanize(fixture_dir)

                label_found = False
                for combo in product([using, None], formats,
                                     compression_formats):
                    database, format, compression_format = combo
                    file_name = '.'.join(
                        p for p in
                        [fixture_name, database, format, compression_format]
                        if p)
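                    # Illustrative (not in the original): parts such as
                    # ('mydata', 'default', 'json', 'gz') join to
                    # 'mydata.default.json.gz'; falsy entries are dropped.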

                    if verbosity > 1:
                        print "Trying %s for %s fixture '%s'..." % \
                            (humanize(fixture_dir), file_name, fixture_name)
                    full_path = os.path.join(fixture_dir, file_name)
                    open_method = compression_types[compression_format]
                    try:
                        fixture = open_method(full_path, 'r')
                        if label_found:
                            fixture.close()
                            print self.style.ERROR(
                                "Multiple fixtures named '%s' in %s. Aborting."
                                % (fixture_name, humanize(fixture_dir)))
                            transaction.rollback(using=using)
                            transaction.leave_transaction_management(
                                using=using)
                            return
                        else:
                            fixture_count += 1
                            objects_in_fixture = 0
                            if verbosity > 0:
                                print "Installing %s fixture '%s' from %s." % \
                                    (format, fixture_name, humanize(fixture_dir))
                            try:
                                objects = serializers.deserialize(format,
                                                                  fixture,
                                                                  using=using)
                                for obj in objects:
                                    if obj.object._meta.app_label not in excluded_apps:
                                        objects_in_fixture += 1
                                        models.add(obj.object.__class__)
                                        obj.save(using=using)
                                object_count += objects_in_fixture
                                label_found = True
                            except (SystemExit, KeyboardInterrupt):
                                raise
                            except Exception:
                                import traceback
                                fixture.close()
                                transaction.rollback(using=using)
                                transaction.leave_transaction_management(
                                    using=using)
                                if show_traceback:
                                    traceback.print_exc()
                                else:
                                    sys.stderr.write(
                                        self.style.ERROR(
                                            "Problem installing fixture '%s': %s\n"
                                            % (full_path, ''.join(
                                                traceback.format_exception(
                                                    sys.exc_type,
                                                    sys.exc_value,
                                                    sys.exc_traceback)))))
                                return
                            fixture.close()

                            # If the fixture we loaded contains 0 objects, assume that an
                            # error was encountered during fixture loading.
                            if objects_in_fixture == 0:
                                sys.stderr.write(
                                    self.style.ERROR(
                                        "No fixture data found for '%s'. (File format may be invalid.)"
                                        % (fixture_name)))
                                transaction.rollback(using=using)
                                transaction.leave_transaction_management(
                                    using=using)
                                return

                    except Exception, e:
                        if verbosity > 1:
                            print "No %s fixture '%s' in %s." % \
                                (format, fixture_name, humanize(fixture_dir))

        # If we found even one object in a fixture, we need to reset the
        # database sequences.
        if object_count > 0:
            sequence_sql = connection.ops.sequence_reset_sql(
                self.style, models)
            if sequence_sql:
                if verbosity > 1:
                    print "Resetting sequences"
                for line in sequence_sql:
                    cursor.execute(line)

        if commit:
            transaction.commit(using=using)
            transaction.leave_transaction_management(using=using)

        if object_count == 0:
            if verbosity > 1:
                print "No fixtures found."
        else:
            if verbosity > 0:
                print "Installed %d object(s) from %d fixture(s)" % (
                    object_count, fixture_count)

        # Close the DB connection. This is required as a workaround for an
        # edge case in MySQL: if the same connection is used to
        # create tables, load data, and query, the query can return
        # incorrect results. See Django #7572, MySQL #37735.
        if commit:
            connection.close()
Example #45
0
File: copy.py Project: kalyptorisk/daversy
class Copy(Command):
    __names__ = ['copy', 'cp']
    __usage__ = ['Copy the state from SOURCE to TARGET.']

    __args__ = ['SOURCE', 'TARGET']
    __options__ = [
        make_option('-f',
                    dest='filter',
                    help='apply a FILTER while reading the state'),
        make_option(
            '-i',
            dest='include_tags',
            default='all',
            metavar='TAGS',
            help=
            'include objects matching specified TAGS from filter (default: "all")'
        ),
        make_option(
            '-x',
            dest='exclude_tags',
            default='ignore',
            metavar='TAGS',
            help=
            'exclude objects matching specified TAGS from filter (default: "ignore")'
        ),
        make_option('-n',
                    dest='name',
                    help='rename the target state to specified NAME.'),
        make_option('-c',
                    dest='comment',
                    default='** dvs **',
                    help='use the given check-in comment (if applicable)'),
    ]

    def execute(self, args, options):
        filters = {}
        if options.filter:
            if not os.path.exists(options.filter):
                self.parser().error('filter: unable to open for reading')
            filters = create_filter(options.filter, options.include_tags,
                                    options.exclude_tags)

        input, output = args
        # load the source state
        saved_state = None
        for provider in PROVIDERS:
            if provider.can_load(input):
                saved_state = provider.load(input, filters)
                break
        else:
            self.parser().error('source: unable to open for reading')

        # (re)name it if applicable
        if options.name:
            saved_state.name = options.name
        elif saved_state.setdefault('name') is None:
            saved_state.name = get_uuid4()

        # save it to target state
        for provider in PROVIDERS:
            if provider.can_save(output):
                provider.save(saved_state, output, options.comment)
                break
        else:
            self.parser().error('target: unable to open for writing')
Example #46
0
def set_python_main(option, opt_str, value, parser):
    """
	Set the main Python code; after contexts are initialized, main is ran.
	"""
    main = (value, option.python_loader)
    setattr(parser.values, option.dest, main)
    # only terminate parsing if not interspersing arguments
    if not parser.allow_interspersed_args:
        parser.rargs.insert(0, '--')
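
# Illustrative note (not part of the original source): optparse stops
# option parsing at '--', so when interspersed arguments are disabled the
# values after '-m MODULE' are left as positional args for the target
# script rather than being parsed as further options here.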


context = optparse.make_option(
    '-C',
    '--context',
    help=_('Python context code to run[file://,module:,<code>]'),
    dest='python_context',
    action='callback',
    callback=append_context,
    type='str')

module = optparse.make_option(
    '-m',
    help=_('Python module to run as script(__main__)'),
    dest='python_main',
    action='callback',
    callback=set_python_main,
    type='str')
module.python_loader = module_loader_descriptor

command = optparse.make_option('-c',
                               help=_('Python expression to run(__main__)'),
Example #47
0
class Command(NoArgsCommand):
    option_list = NoArgsCommand.option_list + (
        make_option('--email',
                    dest='default_email',
                    default=DEFAULT_FAKE_EMAIL,
                    help='Use this as the new email format.'),
        make_option('-a',
                    '--no-admin',
                    action="store_true",
                    dest='no_admin',
                    default=False,
                    help='Do not change administrator accounts'),
        make_option('-s',
                    '--no-staff',
                    action="store_true",
                    dest='no_staff',
                    default=False,
                    help='Do not change staff accounts'),
        make_option('--include',
                    dest='include_regexp',
                    default=None,
                    help='Include usernames matching this regexp.'),
        make_option('--exclude',
                    dest='exclude_regexp',
                    default=None,
                    help='Exclude usernames matching this regexp.'),
        make_option(
            '--include-groups',
            dest='include_groups',
            default=None,
            help=
            'Include users matching this group. (use comma separation for multiple groups)'
        ),
        make_option(
            '--exclude-groups',
            dest='exclude_groups',
            default=None,
            help=
            'Exclude users matching this group. (use comma separation for multiple groups)'
        ),
    )
    help = '''DEBUG only: give all users a new email based on their account data ("%s" by default). Possible parameters are: username, first_name, last_name''' % (
        DEFAULT_FAKE_EMAIL, )
    requires_model_validation = False

    def handle_noargs(self, **options):
        if not settings.DEBUG:
            raise CommandError('Only available in debug mode')

        try:
            from django.contrib.auth import get_user_model  # Django 1.5
        except ImportError:
            from django_extensions.future_1_5 import get_user_model
        from django.contrib.auth.models import Group
        email = options.get('default_email', DEFAULT_FAKE_EMAIL)
        include_regexp = options.get('include_regexp', None)
        exclude_regexp = options.get('exclude_regexp', None)
        include_groups = options.get('include_groups', None)
        exclude_groups = options.get('exclude_groups', None)
        no_admin = options.get('no_admin', False)
        no_staff = options.get('no_staff', False)

        User = get_user_model()
        users = User.objects.all()
        if no_admin:
            users = users.exclude(is_superuser=True)
        if no_staff:
            users = users.exclude(is_staff=True)
        if exclude_groups:
            groups = Group.objects.filter(name__in=exclude_groups.split(","))
            if groups:
                users = users.exclude(groups__in=groups)
            else:
                raise CommandError("No group matches filter: %s" %
                                   exclude_groups)
        if include_groups:
            groups = Group.objects.filter(name__in=include_groups.split(","))
            if groups:
                users = users.filter(groups__in=groups)
            else:
                raise CommandError("No groups matches filter: %s" %
                                   include_groups)
        if exclude_regexp:
            users = users.exclude(username__regex=exclude_regexp)
        if include_regexp:
            users = users.filter(username__regex=include_regexp)
        for user in users:
            user.email = email % {
                'username': user.username,
                'first_name': user.first_name,
                'last_name': user.last_name
            }
            user.save()
        print('Changed %d emails' % users.count())
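
# Hedged usage sketch (not from the original source; the command name is
# assumed, options as declared above -- the format string uses %-style
# keys username/first_name/last_name):
#
#   python manage.py set_fake_emails --email "%(username)s@example.org" --no-admin
#
# Requires settings.DEBUG = True, otherwise the command raises CommandError.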
Example #48
0
class Command(BaseCommand):
    help = 'Outputs the signed transaction hex for broadcasting ' + \
        '(distributing) coins to all Vote Cards.'

    option_list = BaseCommand.option_list + (
        make_option(
            '--html',
            dest='html',
            help='Genitrust Notes Bulk HTML format -- all files separated '
            'by commas.'),
        make_option('--input',
                    dest='input',
                    help='Source of bitcoin as the input address.'),
        make_option(
            '--size',
            dest='size',
            type=str,
            help='Distribution size in units (satoshi) per Vote Card.'),
        make_option(
            '--wif',
            dest='wif',
            help='The input public key\'s corresponding private key for '
            'signing the bitcoin transaction.'),
        make_option(
            '--change',
            dest='change',
            help='Change address to send the remaining voting credits.'),
    )

    def handle(self, *args, **options):
        self.stdout.write('html: %s' % options['html'].split(','))
        self.stdout.write('input: %s' % options['input'])
        self.stdout.write('size: %s' % options['size'])
        self.stdout.write('wif: %s' % options['wif'])
        self.stdout.write('change: %s' % options['change'])

        htmlFiles = options['html'].split(',')
        inputCoins = options['input']
        unitSize = options['size']
        signingWif = options['wif']
        changeAddress = options['change']

        # STEP 1: load all of the voting card bitcoin addresses from HTML files
        btcAddresses = []
        for htmlFile in htmlFiles:
            # TODO: use a function here to scrape the stuff.
            # this function should return a list of bitcoin addresses!!!
            btcAddresses += scrapeAddresses(htmlFile)
            #pass

        # STEP 2: build the transaction with all bitcoin addresses as the
        # transaction outputs.
        line = ['tx', '-i', inputCoins]
        # add each voting card bitcoin address to the command 'line', followed
        # by the amount of satoshi units.
        for address in btcAddresses:
            line += ['%s/%s' % (address, unitSize)]
        line += [changeAddress]
        # output of the unsigned transaction hex
        unsignedTxFile = '%s_%s.unsigned.tx' % (inputCoins, changeAddress)
        line += ['-o', unsignedTxFile]

        # STEP 3: finally -- build the transaction!
        # TODO:
        # use Popen to run this baby? maybe a better solution since we do not
        # need the text from the transaction creation...
        #self.stdout.write('Command for building the transaction: {}'.format(
        #    ' '.join(line)
        #))
        environment = (
            ('PYCOIN_CACHE_DIR', '~/.pycoin_cache'),
            ('PYCOIN_SERVICE_PROVIDERS',
             'BLOCKR_IO:BLOCKCHAIN_INFO:BITEASY:BLOCKEXPLORER'),
        )
        for (k, v) in environment:
            os.putenv(k, v)

        subprocess.call(line, shell=False, stdout=subprocess.PIPE)
        # STEP 4: sign the transaction, with the output going directly to
        # standard output.
        signedTxFile = str(unsignedTxFile).replace('unsigned', 'signed')
        #        line.pop()
        line = ['tx', str(unsignedTxFile), signingWif, '-o', signedTxFile]
        # TODO: send the 'line' to the system command line, and allow the output
        # to be displayed on the screen.
        #print subprocess.Popen("echo Hello World", shell=True, stdout=subprocess.PIPE).stdout.read()
        subprocess.call(line, shell=False, stdout=subprocess.PIPE)
        #print result
        line = ['tx', signedTxFile]
        result = []
        result = subprocess.Popen(line, shell=False, stdout=subprocess.PIPE)
        with result.stdout as f:
            output = f.read()
        return output
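
# Hedged sketch of the underlying pycoin "tx" invocations built above
# (addresses and WIF are illustrative placeholders):
#
#   tx -i <input_addr> <card_addr>/<size> ... <change_addr> -o X.unsigned.tx
#   tx X.unsigned.tx <signing_wif> -o X.signed.tx
#   tx X.signed.tx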
Example #49
0
class Command(AppCommand):
    help = 'Seed your Django database with fake data'

    args = "[appname ...]"

    option_list = [
        make_option('--number',
                    dest='number',
                    default=10,
                    help='number of each model to seed (default 10)'),
    ]

    def add_arguments(self, parser):
        super(Command, self).add_arguments(parser)

        help_text = 'number of each model to seed (default 10)'
        parser.add_argument('--number',
                            nargs='?',
                            type=int,
                            default=10,
                            const=10,
                            help=help_text)

    def handle_app_config(self, app_config, **options):
        if app_config.models_module is None:
            raise SeederCommandError('You must provide an app to seed')

        try:
            number = int(options['number'])
        except ValueError:
            raise SeederCommandError(
                'The value of --number must be an integer')

        seeder = Seed.seeder()

        for model in self.sorted_models(app_config):
            seeder.add_entity(model, number)
            print('Seeding %i %ss' % (number, model.__name__))

        generated = seeder.execute()

        for model, pks in generated.items():
            for pk in pks:
                print("Model {} generated record with primary key {}".format(
                    model.__name__, pk))

    def dependencies(self, model):
        dependencies = set()

        for field in model._meta.get_fields():
            if field.many_to_one is True and field.concrete and field.blank is False:
                dependencies.add(field.related_model)

        return dependencies

    def sorted_models(self, app_config):
        dependencies = {}
        for model in app_config.get_models():
            dependencies[model] = self.dependencies(model)
        try:
            return toposort_flatten(dependencies)
        except ValueError as ex:
            raise SeederCommandError(str(ex))
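
# Hedged usage sketch (not from the original source; "seed" is the assumed
# command name, --number as declared above):
#
#   python manage.py seed myapp --number 25
#
# sorted_models() topologically sorts on concrete, non-blank foreign keys,
# so rows are generated for FK targets before the models referencing them.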
Example #50
0
class Command(BaseCommand):

    help = 'Esporta i crs zippati'
    encoding = 'utf-8'
    output_filename = 'CRS_6_{}.csv'
    logger = logging.getLogger('openaid')

    option_list = BaseCommand.option_list + (
        make_option('--years',
                    dest='years',
                    default='',
                    help='Years to fetch. Use one of this formats: 2012 or 2003-2006 or 2002,2004,2006'),
        make_option('--compress',
                    dest='compress',
                    action='store_true',
                    default=False,
                    help="Generate compressed zip archive"),
    )

    # mapping fields from CSV name to DB name
    field_map = OrderedDict([
        ('year', 'year'),
        ('donorcode', 'project__agency__donor__code'),
        ('donorname', 'project__agency__donor__name'),
        ('agencycode', 'project__agency__code'),
        ('agencyname', 'project__agency__name'),
        ('crsid', 'project__crsid'),
        ('projectnumber', 'number'),
        ('initialreport', 'report_type'),
        ('recipientcode', 'project__recipient__code'),
        ('recipientname', 'project__recipient__name'),
        ('regioncode', 'project__recipient__parent__code'),
        ('regioname', 'project__recipient__parent__name'),
        ('incomegroupcode', 'project__recipient__income_group'),
        ('flowcode', 'flow_type'),
        ('bi_multi', 'bi_multi'),
        ('finance_t', 'project__finance_type'),
        ('aid_t', 'project__aid_type'),
        ('usd_commitment', 'commitment_usd'),
        ('usd_disbursement', 'disbursement_usd'),
        ('commitment_national', 'commitment'),
        ('disbursement_national', 'disbursement'),
        ('shortdescription', 'project__description'),
        ('projecttitle', 'project__title'),
        ('purposecode', 'project__sector__code'),
        ('purposename', 'project__sector__name'),
        ('sectorcode', 'project__sector__parent__code'),
        ('sectorname', 'project__sector__parent__name'),
        ('channelcode', 'project__channel__code'),
        ('channelname', 'project__channel__name'),
        ('channelreportedname', 'channel_reported__name'),
        ('geography', 'geography'),
        ('expectedstartdate', 'expected_start_date'),
        ('completiondate', 'completion_date'),
        ('longdescription', 'long_description'),
        ('gender', 'project__markers__gender'),
        ('environment', 'project__markers__environment'),
        ('trade', 'project__markers__trade'),
        ('pdgg', 'project__markers__pd_gg'),
        ('FTC', 'is_ftc'),
        ('PBA', 'is_pba'),
        ('investmentproject', 'is_investment'),
        ('biodiversity', 'project__markers__biodiversity'),
        ('climateMitigation', 'project__markers__climate_mitigation'),
        ('climateAdaptation', 'project__markers__climate_adaptation'),
        ('desertification', 'project__markers__desertification'),
        ('commitmentdate', 'commitment_date'),
        ('numberrepayment', 'number_repayment'),
        ('grantelement', 'grant_element'),
        ('openaid id', 'project__pk'),
    ])

    # fields needed in the csv in the correct order
    csv_fieldset = OrderedDict([
        ('year', 'year'),
        ('donorcode', 'donorcode'),
        ('donorname', 'donorname'),
        ('agencycode', 'agencycode'),
        ('agencyname', 'agencyname'),
        ('crsid', 'crsid'),
        ('projectnumber', 'projectnumber'),
        ('initialreport', 'initialreport'),
        ('recipientcode', 'recipientcode'),
        ('recipientname', 'recipientname'),
        ('regioncode', 'regioncode'),
        ('regioname', 'regioname'),
        ('incomegroupcode', 'incomegroupcode'),
        ('incomegroupname', 'incomegroupname'),
        ('flowname', 'flowname'),
        ('bi_multi', 'bi_multi'),
        ('finance_t', 'finance_t'),
        ('aid_t', 'aid_t'),
        ('usd_commitment', 'usd_commitment'),
        ('usd_disbursement', 'usd_disbursement'),
        ('currencycode', 'currencycode'),
        ('commitment_national', 'commitment_national'),
        ('disbursement_national', 'disbursement_national'),
        ('shortdescription', 'shortdescription'),
        ('projecttitle', 'projecttitle'),
        ('purposecode', 'purposecode'),
        ('purposename', 'purposename'),
        ('sectorcode', 'sectorcode'),
        ('sectorname', 'sectorname'),
        ('channelcode', 'channelcode'),
        ('channelname', 'channelname'),
        ('channelreportedname', 'channelreportedname'),
        ('geography', 'geography'),
        ('expectedstartdate', 'expectedstartdate'),
        ('completiondate', 'completiondate'),
        ('longdescription', 'longdescription'),
        ('gender', 'gender'),
        ('environment', 'environment'),
        ('trade', 'trade'),
        ('pdgg', 'pdgg'),
        ('FTC', 'FTC'),
        ('PBA', 'PBA'),
        ('investmentproject', 'investmentproject'),
        ('biodiversity', 'biodiversity'),
        ('climateMitigation', 'climateMitigation'),
        ('climateAdaptation', 'climateAdaptation'),
        ('desertification', 'desertification'),
        ('commitmentdate', 'commitmentdate'),
        ('numberrepayment', 'numberrepayment'),
        ('grantelement', 'grantelement'),
        ('openaid id', 'openaid id'),
    ])

    def write_file(self, activity_set, filename):
        f = open(filename, "w")

        udw = UnicodeDictWriter(f, fieldnames=self.csv_fieldset.keys(), encoding=self.encoding)
        udw.writerow(self.csv_fieldset)

        for activity in activity_set:
            udw.writerow(activity)
        f.close()


    def date_to_string(self, date):
        date_format = '%d/%m/%Y'
        if date is not None:
            try:
                return datetime.strftime(date, date_format)
            except ValueError:
                self.logger.error("Wrong date value:{}".format(date))
                return ''
        return ''


    def manipulate(self, activity_set):
        # maps the field names for export using the field map (example: "pk" -> "openaid id")
        # adds display names to a few fields
        # substitutes None values with ""
        # adds the "currencycode" field
        # converts dates to strings

        mapped_activities = []
        for activity in activity_set:

            mapped_activity = OrderedDict()
            # get income group displayname and flowname
            incomegroupname = ''
            flowname = ''
            income_group = activity['project__recipient__income_group']
            if income_group is not None and income_group != '':
                incomegroupname = Recipient.INCOME_GROUPS[income_group]

            if activity['flow_type'] is not None and activity['flow_type'] != '':
                flowname = Activity.FLOW_TYPES[activity['flow_type']]

            # convert dates to string

            activity['expected_start_date'] = self.date_to_string(activity['expected_start_date'])
            activity['completion_date'] = self.date_to_string(activity['completion_date'])
            activity['commitment_date'] = self.date_to_string(activity['commitment_date'])

            for csv_key in self.csv_fieldset.keys():
                value = ''
                db_key = self.field_map.get(csv_key,None)
                if db_key is None:
                    if csv_key == 'currencycode':
                        value = '918'
                    elif csv_key == 'incomegroupname':
                        value = incomegroupname
                    elif csv_key == 'flowname':
                        value = flowname
                    else:
                        raise Exception

                else:
                    value = activity[db_key]

                    if db_key in ['commitment_usd','disbursement_usd','disbursement','commitment'] and type(value) is float:

                        # use fixed-point notation for money values
                        value = format(value, '.12f')

                if value is None:
                    value = u''
                elif value is True:
                    value = u'1'
                elif value is False:
                    value = u'0'

                if type(value) is int or type(value) is float:
                    value = str(value)

                mapped_activity[csv_key] = value

            mapped_activities.append(mapped_activity)
        return mapped_activities

    def compress(self, filename):

        zipfilename = filename+".zip"
        self.logger.info("Compressed file {}".format(zipfilename))
        with zipfile.ZipFile(zipfilename, 'w', zipfile.ZIP_DEFLATED) as myzip:
            myzip.write(filename)

    def export(self, year, compress):
        # gets activities from the DB, manipulates the data, writes to file
        activity_set = Activity.objects.all().order_by('year','crsid')

        activity_set = activity_set.filter(year=int(year))

        activity_set = activity_set.values(*self.field_map.values())

        activity_set = self.manipulate(activity_set)
        self.logger.info("Exported {} lines".format(len(activity_set)))
        filename = self.output_filename.format(year)
        self.write_file(activity_set, filename)
        if compress is True:
            self.compress(filename)


    def handle(self, *args, **options):

        ###
        # years
        ###
        years = options['years']
        compress = options['compress']

        if not years:
            raise Exception("Missing years parameter")

        if "-" in years:
            (start_year, end_year) = years.split("-")
            years = range(int(start_year), int(end_year) + 1)
        else:
            years = [int(y.strip()) for y in years.split(",") if 2001 < int(y.strip()) < 2020]
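
        # e.g. "2004-2006" -> [2004, 2005, 2006]; "2002,2004" -> [2002, 2004]
        # (in the comma form, years outside 2002-2019 are silently dropped).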

        if not years:
            raise Exception("No suitable year found in {0}".format(years))

        self.logger.info("Processing years: {0}".format(years))

        for year in years:
            try:
                self.export(year, compress)
            except KeyboardInterrupt:
                self.logger.error("Command execution aborted.")

        self.logger.info("Finished exporting")
Example #51
0
File: L10n.py Project: xuanxu11/PyLucid
class Command(BaseCommand):
    option_list = BaseCommand.option_list + (
        make_option('--locale', '-l', default=None, dest='locale',
            help='Creates or updates the message files for the given locale (e.g. pt_BR) or for all.'),
    )
    help = (
        'Creates (or updates) .po messages'
        ' or compiles .po files to .mo files'
        ' for use with builtin gettext support.'
    )
    args = "%s/%s [app_name-1, app_name-2 ... app_name-n]" % (MAKE_MESSAGES, COMPILE_MESSAGES)



    requires_model_validation = False
    can_import_settings = False

    def _pylucid_app_names(self):
        def is_own_project(app_name):
            for own_project in OWN_PROJECTS:
                if own_project in app_name:
                    return True
            return False

        app_names = [n for n in settings.INSTALLED_APPS if is_own_project(n)]
        return app_names

    def handle(self, *args, **options):
        self.verbosity = int(options.get('verbosity'))
        locale = options.get('locale')
        if locale is None:
            process_all = True
        else:
            process_all = False

        if len(args) == 0:
            raise CommandError("missing '%s' or '%s' argument!" % (MAKE_MESSAGES, COMPILE_MESSAGES))

        cmd_type = args[0]
        if cmd_type not in (MAKE_MESSAGES, COMPILE_MESSAGES):
            raise CommandError("First argument must be '%s' or '%s' !" % (MAKE_MESSAGES, COMPILE_MESSAGES))

        pylucid_app_names = self._pylucid_app_names()

        if len(args) > 1:
            only_apps = args[1:]
            sys.stdout.write("%s only the apps: %s" % (cmd_type, repr(only_apps)))

            app_names = []
            for app_name in only_apps:
                if app_name in pylucid_app_names:
                    app_names.append(app_name)
                else:
                    app_name = ".%s" % app_name
                    full_app_name = None
                    for app_name2 in pylucid_app_names:
                        if app_name2.endswith(app_name):
                            full_app_name = app_name2
                            break
                    if full_app_name is None:
                        sys.stderr.write("App with name %r is unknown or not a PyLucid app!" % app_name)
                    else:
                        app_names.append(full_app_name)
            if not app_names:
                raise CommandError("No valid PyLucid apps found!")
        else:
            app_names = pylucid_app_names

        for app_name in app_names:
            print "_"*79
            print "%s: %s" % (cmd_type, app_name)
            app_module = import_module(app_name)
            app_path = os.path.dirname(app_module.__file__)

            os.chdir(app_path)

            if cmd_type == COMPILE_MESSAGES:
                try:
                    compile_messages(self.stderr)
                except Exception:
                    print traceback.format_exc()
            elif cmd_type == MAKE_MESSAGES:
                try:
                    make_messages(
                        locale=locale,
                        domain="django",
                        verbosity=self.verbosity,
                        all=process_all,
                        extensions=[".html", ],
                        symlinks=True,
                        ignore_patterns=['CVS', '.*', '*~'],
                        no_wrap=False,
                        no_obsolete=True,
                    )
                except Exception:
                    print traceback.format_exc()
            else:
                raise
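A rough usage sketch for the command above. The command name "l10n" is an
assumption, and the literal subcommands stand in for the MAKE_MESSAGES and
COMPILE_MESSAGES constants defined elsewhere in the module:

#   python manage.py l10n -l pt_BR makemessages pylucid_project.apps.pylucid
#   python manage.py l10n compilemessages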
Example #52
class Command(BaseCommand):
    args = '<MyTardis Exp ID>'
    help = 'Delete the supplied MyTardis Experiment ID'
    option_list = BaseCommand.option_list + (make_option(
        '--list',
        action='store_true',
        dest='list',
        default=False,
        help="Only list the experiment to be deleted, don't actually delete"
    ), ) + (make_option('--confirmed',
                        action='store_true',
                        dest='confirmed',
                        default=False,
                        help="Don't ask the user, just do the deletion"), )

    def handle(self, *args, **options):
        if len(args) != 1:
            raise CommandError("Expected exactly 1 argument - Experiment ID")
        try:
            exp = Experiment.objects.get(pk=int(args[0]))
        except Experiment.DoesNotExist:
            raise CommandError("Experiment ID %s not found" % args[0])

        # FIXME - we fetch a bunch of stuff outside of any transaction, and then
        # do the deletes in a transaction.  There is an obvious race condition
        # here that may result in components of an experiment not being deleted,
        # or being deleted when they shouldn't be.

        # Fetch Datasets and Datafiles and work out which ones would be deleted
        datasets = Dataset.objects.filter(experiments__id=exp.id)
        datafiles = DataFile.objects.filter(
            dataset__id__in=map((lambda ds: ds.id), datasets))
        uniqueDatasets = filter((lambda ds: ds.experiments.count() == 1),
                                datasets)
        uniqueDatasetIds = map((lambda ds: ds.id), uniqueDatasets)
        uniqueDatafiles = filter(
            (lambda df: df.dataset.id in uniqueDatasetIds), datafiles)

        # Fetch other stuff to be printed and deleted.
        acls = ObjectACL.objects.filter(content_type=exp.get_ct(),
                                        object_id=exp.id)
        authors = ExperimentAuthor.objects.filter(experiment=exp)
        epsets = ExperimentParameterSet.objects.filter(experiment=exp)

        confirmed = options.get('confirmed', False)
        listOnly = options.get('list', False)
        if not listOnly and not confirmed:
            self.stdout.write("Delete the following experiment?\n\n")

        if listOnly or not confirmed:
            # Print basic experiment information
            self.stdout.write("Experiment\n    ID: {0}\n".format(exp.id))
            self.stdout.write("    Title: {0}\n".format(exp.title))
            self.stdout.write("    Locked: {0}\n".format(exp.locked))
            self.stdout.write("    Public Access: {0}\n".format(
                exp.public_access))

            # List experiment authors
            self.stdout.write("    Authors:\n")
            for author in authors:
                self.stdout.write("        {0}\n".format(author.author))

            # List experiment metadata
            for epset in epsets:
                self.stdout.write("    Param Set: {0} - {1}\n".format(
                    epset.schema.name, epset.schema.namespace))
                params = ExperimentParameter.objects.filter(parameterset=epset)
                for param in params:
                    self.stdout.write("        {0} = {1}\n".format(
                        param.name.full_name, param.get()))

            # List experiment ACLs
            self.stdout.write("    ACLs:\n")
            for acl in acls:
                self.stdout.write("        {0}-{1}, flags: ".format(
                    acl.pluginId, acl.entityId))
                if acl.canRead:
                    self.stdout.write("R")
                if acl.canWrite:
                    self.stdout.write("W")
                if acl.canDelete:
                    self.stdout.write("D")
                if acl.isOwner:
                    self.stdout.write("O")
                self.stdout.write("\n")

            # Basic Statistics
            self.stdout.write(
                "    {0} total dataset(s), containing {1} file(s)\n".format(
                    datasets.count(), datafiles.count()))
            self.stdout.write(
                "    {0} non-shared dataset(s), containing {1} file(s)\n".
                format(len(uniqueDatasets), len(uniqueDatafiles)))
            if len(uniqueDatasets) > 0 and not listOnly:
                self.stdout.write(
                    "        (The non-shared datasets and files will be deleted)\n"
                )

        # If the user has only requested a listing finish now
        if listOnly:
            return

        if not confirmed:
            # User must enter "yes" to proceed
            self.stdout.write("\n\nConfirm Deletion? (yes): ")
            ans = sys.stdin.readline().strip()
            if ans != "yes":
                self.stdout.write("'yes' not entered, aborting.\n")
                return

        # Consider the entire experiment deletion atomic
        using = options.get('database', DEFAULT_DB_ALIAS)

        try:
            with transaction.atomic(using=using):
                acls.delete()
                epsets.delete()
                for dataset in datasets:
                    dataset.experiments.remove(exp.id)
                    if dataset.experiments.count() == 0:
                        DatasetParameterSet.objects.filter(
                            dataset=dataset).delete()
                        for datafile in DataFile.objects.filter(
                                dataset=dataset):
                            DatafileParameterSet.objects.filter(
                                datafile=datafile).delete()
                            datafile.delete()
                        dataset.delete()
                authors.delete()
                exp.delete()
        except Exception:
            exc_class, exc, tb = sys.exc_info()
            new_exc = CommandError(
                "Exception %s has occurred: rolled back transaction" %
                (exc or exc_class))
            raise new_exc.__class__, new_exc, tb
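A brief usage sketch for the deletion command above, assuming it is installed
as "rmexperiment" (the filename is not shown in the source): list first, then
delete without the interactive prompt.

#   python manage.py rmexperiment --list 42
#   python manage.py rmexperiment --confirmed 42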
Example #53
class Command(BaseCommand):
    help = "synchronize local tables to PeeringDB"

    option_list = BaseCommand.option_list + (
        make_option('-n',
                    '--dry-run',
                    action='store_true',
                    default=False,
                    help='simulate the sync without saving changes'),
        make_option('--debug',
                    action='store_true',
                    default=False,
                    help='enable extra debug output'),
        make_option('--only',
                    action='store',
                    default=False,
                    help='only process this ixp (id)'),
    )

    # progress
    # quiet

    def handle(self, *args, **options):
        self.log = logging.getLogger('peeringdb.sync')

        kwargs = {}
        if settings.SYNC_USERNAME:
            kwargs['user'] = settings.SYNC_USERNAME
            kwargs['password'] = settings.SYNC_PASSWORD

        self.connect(settings.SYNC_URL, **kwargs)

        # get models if limited by config
        tables = self.get_class_list(settings.SYNC_ONLY)

        # disable auto now
        for model in tables:
            for field in model._meta.fields:
                if field.name == "created":
                    field.auto_now_add = False
                if field.name == "updated":
                    field.auto_now = False

        self.sync(tables)

    def connect(self, url, **kwargs):
        self.rpc = RestClient(url, **kwargs)

    def sync(self, tables):
        for cls in tables:
            self.update_db(cls, self.get_objs(cls))

    def get_class_list(self, only=None):
        tables = []
        if only:
            for name in only:
                tables.append(get_model(name))
        else:
            tables = django_peeringdb.models.all_models
        return tables

    def get_since(self, cls):
        upd = cls.handleref.last_change()
        if upd:
            return int(time.mktime(upd.timetuple()))
        return 0

    def get_data(self, cls, since):
        return self.rpc.all(cls._handleref.tag, since=since)

    def get_objs(self, cls, **kwargs):
        since = self.get_since(cls)

        data = self.rpc.all(cls._handleref.tag, since=since, **kwargs)
        print("%s last update %s %d changed" %
              (cls._handleref.tag, str(since), len(data)))
        return data

    def cls_from_tag(self, tag):
        tables = self.get_class_list()
        for cls in tables:
            if cls._handleref.tag == tag:
                return cls
        raise Exception("Unknown reftag: %s" % tag)

    def _sync(self, cls, row):
        """
        Try to sync an object to the local database, in case of failure
        where a referenced object is not found, attempt to fetch said 
        object from the REST api
        """
        try:
            sync.sync_obj(cls, row)
        except django.core.exceptions.ValidationError, inst:
            # There were validation errors
            for field, errlst in inst.error_dict.items():
                # check if it was a relationship that doesnt exist locally
                m = re.match(".+ with id (\d+) does not exist.+", str(errlst))
                if m:
                    print(
                        "%s.%s not found locally, trying to fetch object... " %
                        (field, m.group(1)))
                    # fetch missing object
                    r = self.rpc.get(field, int(m.group(1)), depth=0)

                    # sync missing object
                    self._sync(self.cls_from_tag(field), r[0])
                else:
                    raise

            # try to sync initial object once more
            sync.sync_obj(cls, row)
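The retry logic in _sync generalizes to a small pattern: when saving fails
because a dependency is missing, fetch and sync the dependency first, then
retry the original row. A minimal self-contained sketch, where save_local,
fetch_remote and missing_dep_id are hypothetical stand-ins for sync.sync_obj,
the RestClient call and the regex match above:

def sync_with_deps(row, save_local, fetch_remote, missing_dep_id):
    try:
        save_local(row)
    except LookupError as exc:                 # stands in for ValidationError
        dep_id = missing_dep_id(exc)           # id parsed from the error text
        dep_row = fetch_remote(dep_id)         # fetch the missing object
        sync_with_deps(dep_row, save_local, fetch_remote, missing_dep_id)
        save_local(row)                        # retry the original row once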
Example #54
def Options():
    OptionList = [
        make_option(
            "-s",
            "--source-code",
            dest="FileType",
            const="SourceCode",
            action="store_const",
            help=
            "The input file is preprocessed source code, including C or assembly code"
        ),
        make_option("-r",
                    "--vfr-file",
                    dest="FileType",
                    const="Vfr",
                    action="store_const",
                    help="The input file is preprocessed VFR file"),
        make_option("--Vfr-Uni-Offset",
                    dest="FileType",
                    const="VfrOffsetBin",
                    action="store_const",
                    help="The input file is EFI image"),
        make_option("--asl-deps",
                    dest="AslDeps",
                    const="True",
                    action="store_const",
                    help="Generate Asl dependent files."),
        make_option("-a",
                    "--asl-file",
                    dest="FileType",
                    const="Asl",
                    action="store_const",
                    help="The input file is ASL file"),
        make_option("--asm-file",
                    dest="FileType",
                    const="Asm",
                    action="store_const",
                    help="The input file is asm file"),
        make_option(
            "-c",
            "--convert-hex",
            dest="ConvertHex",
            action="store_true",
            help="Convert standard hex format (0xabcd) to MASM format (abcdh)"
        ),
        make_option("-l",
                    "--trim-long",
                    dest="TrimLong",
                    action="store_true",
                    help="Remove postfix of long number"),
        make_option(
            "-i",
            "--include-path-file",
            dest="IncludePathFile",
            help=
            "The input file is include path list to search for ASL include file"
        ),
        make_option("-o",
                    "--output",
                    dest="OutputFile",
                    help="File to store the trimmed content"),
        make_option("--ModuleName",
                    dest="ModuleName",
                    help="The module's BASE_NAME"),
        make_option("--DebugDir",
                    dest="DebugDir",
                    help="Debug Output directory to store the output files"),
        make_option("-v",
                    "--verbose",
                    dest="LogLevel",
                    action="store_const",
                    const=EdkLogger.VERBOSE,
                    help="Run verbosely"),
        make_option("-d",
                    "--debug",
                    dest="LogLevel",
                    type="int",
                    help="Run with debug information"),
        make_option("-q",
                    "--quiet",
                    dest="LogLevel",
                    action="store_const",
                    const=EdkLogger.QUIET,
                    help="Run quietly"),
        make_option("-?",
                    action="help",
                    help="show this help message and exit"),
    ]

    # use clearer usage to override default usage message
    UsageString = "%prog [-s|-r|-a|--Vfr-Uni-Offset] [-c] [-v|-d <debug_level>|-q] [-i <include_path_file>] [-o <output_file>] [--ModuleName <ModuleName>] [--DebugDir <DebugDir>] [<input_file>]"

    Parser = OptionParser(description=__copyright__,
                          version=__version__,
                          option_list=OptionList,
                          usage=UsageString)
    Parser.set_defaults(FileType="Vfr")
    Parser.set_defaults(ConvertHex=False)
    Parser.set_defaults(LogLevel=EdkLogger.INFO)

    Options, Args = Parser.parse_args()

    # error check
    if Options.FileType == 'VfrOffsetBin':
        if len(Args) == 0:
            return Options, ''
        elif len(Args) > 1:
            EdkLogger.error("Trim",
                            OPTION_NOT_SUPPORTED,
                            ExtraData=Parser.get_usage())
    if len(Args) == 0:
        EdkLogger.error("Trim", OPTION_MISSING, ExtraData=Parser.get_usage())
    if len(Args) > 1:
        EdkLogger.error("Trim",
                        OPTION_NOT_SUPPORTED,
                        ExtraData=Parser.get_usage())

    InputFile = Args[0]
    return Options, InputFile
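A sketch of how a caller might consume the (Options, InputFile) pair returned
above; the main() wrapper is an assumption, not code from the source, and it
presumes EdkLogger.SetLevel accepts the level constants used in the defaults:

def main():
    CommandOptions, InputFile = Options()
    EdkLogger.SetLevel(CommandOptions.LogLevel)  # honour -v / -d / -q
    # ... dispatch on CommandOptions.FileType ("Vfr", "Asl", "Asm", "SourceCode", ...)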
Example #55
class Command(BaseCommand):
    help = 'Generate ABP fixtures from typology'

    option_list = BaseCommand.option_list + (
        make_option('--typology',
                    action='store',
                    dest='typology',
                    default="",
                    help='The file typology.json'),
        make_option('--category_classes',
                    action='store',
                    dest='category_classes',
                    default="",
                    help='Category classes to generate (ids comma separated)'),
        make_option('--dump',
                    action='store',
                    dest='dump',
                    default="",
                    help='Dump to generate (values are fms or fmsproxy)'),
    )

    idx_reportsecondarycategoryclass = 9
    idx_reportsubcategory = 0
    idx_reportcategory = 150

    abp_entity_id = 23
    abp_group_id = 300
    abp_groupmail_id = 200
    abp_user_id = 10673

    def handle(self, *args, **options):
        logger.info('Loading file %s' % options['typology'])

        fixtures_fms = []
        fixtures_fmsproxy = []

        subnature_ids = []
        bagtype_ids = []

        nature_type_ids = {}
        types_bagtypes_flag = {}

        translations_corrections = {
            u"...autre...": u"Propreté",
            u"...andere...": u"Netheid",
            u"autre (Inclassable)": u"Inclassable",
            u"andere (Onklasseerbaar)": u"Onklasseerbaar"
        }

        with open(options['typology']) as json_data:
            typology = json.load(json_data)
            logger.info('Typology file loaded')

            logger.info('Processing types')
            idx = 0
            types_ids = {}
            for typology_type in typology['data']['types']:
                types_ids[typology_type[
                    'type_id']] = self.idx_reportsecondarycategoryclass + idx

                try:
                    name_fr = translations_corrections[typology_type['label']
                                                       ['fr']]
                    name_nl = translations_corrections[typology_type['label']
                                                       ['nl']]
                except KeyError:
                    name_fr = typology_type['label']['fr']
                    name_nl = typology_type['label']['nl']

                reportsecondarycategoryclass = {
                    "pk": types_ids[typology_type['type_id']],
                    "model": "fixmystreet.reportsecondarycategoryclass",
                    "fields": {
                        "name_en": name_fr,
                        "name_fr": name_fr,
                        "name_nl": name_nl
                    }
                }
                fixtures_fms.append(reportsecondarycategoryclass)

                # Map natures id's and type id's
                for nature in typology_type['nature_ids']:
                    nature_type_ids[nature] = types_ids[
                        typology_type['type_id']]

                # Flag if bagtypes
                for bagtype in typology_type['bagtype_ids']:
                    types_bagtypes_flag[types_ids[
                        typology_type['type_id']]] = True

                # FMSProxy
                fmsproxy_type = {
                    "fields": {
                        "fms_id": types_ids[typology_type['type_id']],
                        "abp_id": typology_type['type_id']
                    },
                    "model": "abp.type",
                    "pk": idx
                }
                fixtures_fmsproxy.append(fmsproxy_type)

                idx = idx + 1

            logger.info('Processing subnatures')
            idx = 1
            subnatures_ids = {}
            for typology_subnature in typology['data']['subnatures']:
                subnatures_ids[typology_subnature[
                    'subnature_id']] = self.idx_reportsubcategory + idx

                try:
                    name_fr = translations_corrections[
                        typology_subnature['label']['fr']]
                    name_nl = translations_corrections[
                        typology_subnature['label']['nl']]
                except KeyError:
                    name_fr = typology_subnature['label']['fr']
                    name_nl = typology_subnature['label']['nl']

                reportsubcategory = {
                    "pk":
                    subnatures_ids[typology_subnature['subnature_id']],
                    "model": "fixmystreet.reportsubcategory",
                    "fields": {
                        "name_en": name_fr,
                        "name_fr": name_fr,
                        "name_nl": name_nl
                    }
                }
                fixtures_fms.append(reportsubcategory)
                subnature_ids.append(
                    subnatures_ids[typology_subnature['subnature_id']])

                # FMSProxy
                fmsproxy_subnature = {
                    "fields": {
                        "fms_id":
                        subnatures_ids[typology_subnature['subnature_id']],
                        "abp_id":
                        typology_subnature['subnature_id']
                    },
                    "model": "abp.subnature",
                    "pk": idx
                }
                fixtures_fmsproxy.append(fmsproxy_subnature)

                idx = idx + 1

            logger.info('Processing bagtypes')
            idx = 1
            bagtypes_ids = {}
            for typology_bagtype in typology['data']['bagtypes']:
                bagtypes_ids[typology_bagtype[
                    'bagtype_id']] = self.idx_reportsubcategory + idx

                try:
                    name_fr = translations_corrections[
                        typology_bagtype['label']['fr']]
                    name_nl = translations_corrections[
                        typology_bagtype['label']['nl']]
                except KeyError:
                    name_fr = typology_bagtype['label']['fr']
                    name_nl = typology_bagtype['label']['nl']

                reportsubcategory = {
                    "pk": bagtypes_ids[typology_bagtype['bagtype_id']],
                    "model": "fixmystreet.reportsubcategory",
                    "fields": {
                        "name_en": name_fr,
                        "name_fr": name_fr,
                        "name_nl": name_nl
                    }
                }
                fixtures_fms.append(reportsubcategory)
                bagtype_ids.append(
                    bagtypes_ids[typology_bagtype['bagtype_id']])

                # FMSProxy
                fmsproxy_bagtype = {
                    "fields": {
                        "fms_id": bagtypes_ids[typology_bagtype['bagtype_id']],
                        "abp_id": typology_bagtype['bagtype_id']
                    },
                    "model": "abp.bagtype",
                    "pk": idx
                }
                fixtures_fmsproxy.append(fmsproxy_bagtype)

                idx = idx + 1

            if options['category_classes']:
                category_classes = options['category_classes'].split(',')

                logger.info('Processing natures')
                idx = 0
                natures_ids = {}
                for typology_nature in typology['data']['natures']:

                    for category_class in category_classes:
                        natures_ids[typology_nature[
                            'nature_id']] = self.idx_reportcategory + idx

                        try:
                            name_fr = translations_corrections[
                                typology_nature['label']['fr']]
                            name_nl = translations_corrections[
                                typology_nature['label']['nl']]
                        except KeyError:
                            name_fr = typology_nature['label']['fr']
                            name_nl = typology_nature['label']['nl']

                        reportcategory = {
                            "pk": natures_ids[typology_nature['nature_id']],
                            "model": "fixmystreet.reportcategory",
                            "fields": {
                                "name_en":
                                name_fr,
                                "name_fr":
                                name_fr,
                                "name_nl":
                                name_nl,
                                "public":
                                True,
                                "organisation_regional":
                                self.abp_entity_id,
                                "organisation_communal":
                                self.abp_entity_id,
                                "category_class":
                                int(category_class),
                                "secondary_category_class":
                                nature_type_ids[typology_nature['nature_id']]
                            },
                        }

                        # Set bagtypes if needed
                        try:
                            if types_bagtypes_flag[nature_type_ids[
                                    typology_nature['nature_id']]]:
                                reportcategory['fields'][
                                    'sub_categories'] = bagtype_ids
                        except KeyError:
                            pass

                        fixtures_fms.append(reportcategory)

                        # FMSProxy
                        fmsproxy_nature = {
                            "fields": {
                                "fms_id":
                                natures_ids[typology_nature['nature_id']],
                                "abp_id": typology_nature['nature_id']
                            },
                            "model": "abp.nature",
                            "pk": idx
                        }
                        fixtures_fmsproxy.append(fmsproxy_nature)

                        idx = idx + 1

            # Add ABP Entity
            fixtures_fms.append({
                "fields": {
                    "name_en": "Bruxelles-Propreté",
                    "name_fr": "Bruxelles-Propreté",
                    "name_nl": "Net Brussels",
                    "email": "*****@*****.**",
                    "phone": "0800 981 81",
                    "active": True,
                    "type": "R",
                },
                "model": "fixmystreet.organisationentity",
                "pk": self.abp_entity_id
            })

            # Add ABP Group and generate dispatching for it
            fixtures_fms.append({
                "fields": {
                    "name_en": "Bruxelles-Propreté Group",
                    "name_fr": "Bruxelles-Propreté Groupe",
                    "name_nl": "Net Brussels Group",
                    "created": datetime.datetime.now(),
                    "dispatch_categories": natures_ids.values(),
                    "dependency": self.abp_entity_id,
                    "modified": datetime.datetime.now(),
                    "email": "*****@*****.**",
                    "phone": "0800 981 81",
                    "active": True,
                    "slug_fr": "Bruxelles-Propreté Groupe",
                    "type": "D",
                    "slug_nl": "Net Brussels Group"
                },
                "model": "fixmystreet.organisationentity",
                "pk": self.abp_group_id
            })

            # Add GroupMailConfig for Abp Group
            fixtures_fms.append({
                "fields": {
                    "notify_members": False,
                    "group": self.abp_group_id,
                    "digest_closed": False,
                    "digest_other": False,
                    "digest_created": False,
                    "digest_inprogress": False,
                    "notify_group": False
                },
                "model": "fixmystreet.groupmailconfig",
                "pk": self.abp_groupmail_id
            })

            # Fix abp user entity
            fixtures_fms.append({
                "fields": {
                    "applicant": False,
                    "logical_deleted": False,
                    "organisation": self.abp_entity_id,
                    "telephone": "0800 981 81",
                    "agent": False,
                    "contractor": False,
                    "manager": True,
                    "groups": [],
                    "modified": datetime.datetime.now(),
                    "user_permissions": [],
                    "quality": None,
                    "leader": False
                },
                "model": "fixmystreet.fmsuser",
                "pk": self.abp_user_id
            })

            # Create membership to group
            fixtures_fms.append({
                "fields": {
                    "created": datetime.datetime.now(),
                    "organisation": self.abp_group_id,
                    "contact_user": True,
                    "modified": datetime.datetime.now(),
                    "user": self.abp_user_id
                },
                "model":
                "fixmystreet.userorganisationmembership"
            })

        if options['dump'] == 'fms':
            logger.info('Dump fixtures fms')
            print json.dumps(fixtures_fms, indent=4, default=self.date_handler)

        if options['dump'] == 'fmsproxy':
            logger.info('Dump fixtures fmsproxy')
            print json.dumps(fixtures_fmsproxy, indent=4)

    def date_handler(self, obj):
        if hasattr(obj, 'isoformat'):
            return obj.isoformat()
        else:
            raise TypeError
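Since the command prints the generated fixtures to stdout, a typical run would
redirect each dump into a fixture file. Sketch only; "generate_abp_fixtures"
is an assumed command name:

#   python manage.py generate_abp_fixtures --typology typology.json \
#       --category_classes 1,2 --dump fms > abp_fms_fixtures.json
#   python manage.py generate_abp_fixtures --typology typology.json \
#       --category_classes 1,2 --dump fmsproxy > abp_fmsproxy_fixtures.json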
Example #56
class Command(BaseCommand):

    help = "Render a wiki document"
    option_list = BaseCommand.option_list + (
        make_option('--baseurl',
                    dest="baseurl",
                    default=False,
                    help="Base URL to site"),
        make_option('--force',
                    action="store_true",
                    dest="force",
                    default=False,
                    help="Force rendering, first clearing record of any "
                    "rendering in progress"),
        make_option('--nocache',
                    action="store_true",
                    dest="nocache",
                    default=False,
                    help="Use Cache-Control: no-cache instead of max-age=0"),
        make_option('--defer',
                    action="store_true",
                    dest="defer",
                    default=False,
                    help="Defer rendering"),
    )

    def handle(self, *args, **options):

        base_url = options['baseurl']
        if not base_url:
            from django.contrib.sites.models import Site
            site = Site.objects.get_current()
            base_url = 'http://%s' % site.domain

        path = args[0]
        if path.startswith('/'):
            path = path[1:]
        locale, sep, slug = path.partition('/')
        head, sep, tail = slug.partition('/')
        if head == 'docs':
            slug = tail

        doc = Document.objects.get(locale=locale, slug=slug)

        if options['force']:
            doc.render_started_at = None

        if options['nocache']:
            cc = 'no-cache'
        else:
            cc = 'max-age=0'

        if options['defer']:
            logging.info("Queuing deferred render for %s (%s)" %
                         (doc, doc.get_absolute_url()))
            render_document.delay(doc, cc, base_url)
            logging.info("Queued.")

        else:
            logging.info("Rendering %s (%s)" % (doc, doc.get_absolute_url()))
            try:
                render_document(doc, cc, base_url)
                logging.info("DONE.")
            except DocumentRenderingInProgress:
                logging.error("Rendering is already in progress for this "
                              "document")
Example #57
class Command(BaseCommand):
    args = ''
    help = 'Importeer dino zipfile'
    option_list = BaseCommand.option_list + (
        make_option('--net',
                    action='store',
                    type='int',
                    dest='netid',
                    help='netwerk id',
                    default=1),
        make_option('--file',
                    action='store',
                    type='string',
                    dest='dino',
                    help='zipfile van dinoloket',
                    default=None),
    )

    def handle(self, *args, **options):
        fname = options.get('dino')
        if fname:
            dino = Dino()
            netid = options.get('netid')
            network, created = Network.objects.get_or_create(pk=netid)
            network.well_set.filter(nitg=None).delete()
            for f, d in dino.iter_zip(fname):
                try:
                    name = d['Locatie']
                    nr = parsenum(d['Filternummer'], int, 1)
                    try:
                        datum = datetime.datetime.strptime(
                            d['Datum maaiveld gemeten'], '%d-%m-%Y')
                    except (KeyError, ValueError):
                        datum = datetime.date.today()
                    x = float(d['X-coordinaat'])
                    y = float(d['Y-coordinaat'])
                    loc = Point(x, y)
                    maaiveld = parsenum(d['Maaiveld (cm t.o.v. NAP)']) / 100
                    refpnt = parsenum(d['Meetpunt (cm t.o.v. NAP)']) / 100
                    # replace existing well
                    #network.well_set.filter(nitg=name).delete()
                    well, created = network.well_set.get_or_create(
                        nitg=name,
                        defaults={
                            'name': name,
                            'maaiveld': maaiveld,
                            'refpnt': refpnt,
                            'location': loc,
                            'date': datum
                        })
                    top = parsenum(d['Bovenkant filter (cm t.o.v. NAP)']) / 100
                    bottom = parsenum(
                        d['Onderkant filter (cm t.o.v. NAP)']) / 100
                    filter, newfilter = well.screen_set.get_or_create(
                        nr=nr, defaults={
                            'top': top,
                            'bottom': bottom
                        })
                    print filter
#                     for s in d['standen']:
#                         try:
#                             date = s[2]
#                             value = s[5]
#                             if len(date)>0 and len(value)>0:
#                                 peildatum = datetime.datetime.strptime(date+' 12:00:00','%d-%m-%Y %H:%M:%S')
#                                 standnap = float(value) / 100
#                                 if newfilter:
#                                     filter.datapoint_set.create(date=peildatum,level=standnap)
#                                 else:
#                                     filter.datapoint_set.get_or_create(date=peildatum,defaults={'level': standnap})
#                         except Exception as e:
#                             print name, nr, e
                except Exception as e:
                    print name, nr, e
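A usage sketch, assuming the command is installed as "import_dino" (the
filename is not shown in the source):

#   python manage.py import_dino --net 1 --file dinoloket_export.zip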
Example #58
class Command(BaseCommand):
    # args = ''
    option_list = BaseCommand.option_list + (
        make_option('--download',
            action='store_true'),
        make_option('--insert',
            action='store_true'),
    )
    help = 'Parsing irr.by realty'

    @transaction.commit_manually
    def handle(self, *args, **options):
        import sys
        sys.path.append("/home/bkmz/Dev/realty_parser/src")
        from analytics import insert as insert_irr

        mongo_objects = []

        print "Start Truncating"
        # Ad.objects.all().delete()
        Advert.objects.all().delete()
        Advert._get_db().mongoengine.counters.remove({})
        print "Truncating finished"

        COUNT = 0
        for x in insert_irr():
            # print x['url']

            # try:
            #     current_region = Region.objects.filter(name=x['region']).get()
            # except Region.DoesNotExist:
            #     print "Region not found! Skip ad"
            #     import ipdb; ipdb.set_trace()
            #     continue

            # Advert(floor=2).save()
            if u"Адрес" in x.keys() and not x[u'Адрес'].strip() == "":
                # x['address'] = x[u'Адрес']
                del x[u'Адрес']

            # if x['address'] == "":
            #     import ipdb; ipdb.set_trace()

            all = set(KEYS)
            all2 = set(VALUES)
            adv = set(x.keys())

            old_keys       = list((adv & all2))
            converted_keys = [DICT2[xi] for xi in (adv & all2)]


            nonrel_adv = x

            for key1 in x.keys():
                if key1 in DICT2:
                    nonrel_adv[DICT2[key1]] = x[key1]
                    del nonrel_adv[key1]


            # nonrel_adv['region'] = int(current_region.pk)
            nonrel_adv['region'] = x['region'].strip()

            ad_nonrel_obj = Advert(**nonrel_adv)
            # ad_nonrel_obj.save()

            mongo_objects.append(ad_nonrel_obj)

            # ad_nonrel_obj.save()

            print COUNT
            COUNT += 1
        Advert.objects.insert(mongo_objects)
        transaction.commit()
Example #59
class BaseImporterCommand(BaseCommand):
    """
    Base importer command for blogging platform specific management
    commands to subclass when importing blog posts into Mezzanine.
    The ``handle_import`` method should be overridden to provide the
    import mechanism specific to the blogging platform being dealt with.
    """

    option_list = BaseCommand.option_list + (
        make_option("-m", "--mezzanine-user", dest="mezzanine_user",
            help="Mezzanine username to assign the imported blog posts to."),
        make_option("--noinput", action="store_false", dest="interactive",
            default=True, help="Do NOT prompt for input of any kind. "
                               "Fields will be truncated if too long."),
        make_option("-n", "--navigation", action="store_true",
            dest="in_navigation", help="Add any imported pages to navigation"),
        make_option("-f", "--footer", action="store_true", dest="in_footer",
            help="Add any imported pages to footer navigation"),
    )

    def __init__(self, **kwargs):
        self.posts = []
        self.pages = []
        super(BaseImporterCommand, self).__init__(**kwargs)

    def add_post(self, title=None, content=None, old_url=None, pub_date=None,
                 tags=None, categories=None, comments=None):
        """
        Adds a post to the post list for processing.

        - ``title`` and ``content`` are strings for the post.
        - ``old_url`` is a string that a redirect will be created for.
        - ``pub_date`` is assumed to be a ``datetime`` object.
        - ``tags`` and ``categories`` are sequences of strings.
        - ``comments`` is a sequence of dicts - each dict should be the
          return value of ``add_comment``.
        """
        if not title:
            title = strip_tags(content).split(". ")[0]
        title = decode_entities(title)
        if categories is None:
            categories = []
        if tags is None:
            tags = []
        if comments is None:
            comments = []
        self.posts.append({
            "title": force_text(title),
            "publish_date": pub_date,
            "content": force_text(content),
            "categories": categories,
            "tags": tags,
            "comments": comments,
            "old_url": old_url,
        })
        return self.posts[-1]

    def add_page(self, title=None, content=None, old_url=None,
                 tags=None, old_id=None, old_parent_id=None):
        """
        Adds a page to the list of pages to be imported - used by the
        Wordpress importer.
        """
        if not title:
            text = decode_entities(strip_tags(content)).replace("\n", " ")
            title = text.split(". ")[0]
        if tags is None:
            tags = []
        self.pages.append({
            "title": title,
            "content": content,
            "tags": tags,
            "old_url": old_url,
            "old_id": old_id,
            "old_parent_id": old_parent_id,
        })

    def add_comment(self, post=None, name=None, email=None, pub_date=None,
                    website=None, body=None):
        """
        Adds a comment to the post provided.
        """
        if post is None:
            if not self.posts:
                raise CommandError("Cannot add comments without posts")
            post = self.posts[-1]
        post["comments"].append({
            "user_name": name,
            "user_email": email,
            "submit_date": pub_date,
            "user_url": website,
            "comment": body,
        })

    def trunc(self, model, prompt, **fields):
        """
        Truncates fields values for the given model. Prompts for a new
        value if truncation occurs.
        """
        for field_name, value in fields.items():
            field = model._meta.get_field(field_name)
            max_length = getattr(field, "max_length", None)
            if not max_length:
                continue
            elif not prompt:
                fields[field_name] = value[:max_length]
                continue
            while len(value) > max_length:
                encoded_value = value.encode("utf-8")
                new_value = input("The value for the field %s.%s exceeds "
                    "its maximum length of %s chars: %s\n\nEnter a new value "
                    "for it, or press return to have it truncated: " %
                    (model.__name__, field_name, max_length, encoded_value))
                value = new_value if new_value else value[:max_length]
            fields[field_name] = value
        return fields

    def handle(self, *args, **options):
        """
        Processes the converted data into the Mezzanine database correctly.

        Attributes:
            mezzanine_user: the user to put this data in against
            date_format: the format the dates are in for posts and comments
        """

        mezzanine_user = options.get("mezzanine_user")
        site = Site.objects.get_current()
        verbosity = int(options.get("verbosity", 1))
        prompt = options.get("interactive")

        # Validate the Mezzanine user.
        if mezzanine_user is None:
            raise CommandError("No Mezzanine user has been specified")
        try:
            mezzanine_user = User.objects.get(username=mezzanine_user)
        except User.DoesNotExist:
            raise CommandError("Invalid Mezzanine user: %s" % mezzanine_user)

        # Run the subclassed ``handle_import`` and save posts, tags,
        # categories, and comments to the DB.
        self.handle_import(options)
        for post_data in self.posts:
            categories = post_data.pop("categories")
            tags = post_data.pop("tags")
            comments = post_data.pop("comments")
            old_url = post_data.pop("old_url")
            post_data = self.trunc(BlogPost, prompt, **post_data)
            initial = {
                "title": post_data.pop("title"),
                "user": mezzanine_user,
            }
            post, created = BlogPost.objects.get_or_create(**initial)
            for k, v in post_data.items():
                setattr(post, k, v)
            post.save()
            if created and verbosity >= 1:
                print("Imported post: %s" % post)
            for name in categories:
                cat = self.trunc(BlogCategory, prompt, title=name)
                if not cat["title"]:
                    continue
                cat, created = BlogCategory.objects.get_or_create(**cat)
                if created and verbosity >= 1:
                    print("Imported category: %s" % cat)
                post.categories.add(cat)
            for comment in comments:
                comment = self.trunc(ThreadedComment, prompt, **comment)
                comment["site"] = site
                post.comments.add(ThreadedComment(**comment))
                if verbosity >= 1:
                    print("Imported comment by: %s" % comment["user_name"])
            self.add_meta(post, tags, prompt, verbosity, old_url)

        # Create any pages imported (Wordpress can include pages)
        in_menus = []
        footer = [menu[0] for menu in settings.PAGE_MENU_TEMPLATES
                  if menu[-1] == "pages/menus/footer.html"]
        if options["in_navigation"]:
            in_menus = [menu[0] for menu in settings.PAGE_MENU_TEMPLATES]
            if footer and not options["in_footer"]:
                in_menus.remove(footer[0])
        elif footer and options["in_footer"]:
            in_menus = footer
        parents = []
        for page in self.pages:
            tags = page.pop("tags")
            old_url = page.pop("old_url")
            old_id = page.pop("old_id")
            old_parent_id = page.pop("old_parent_id")
            page = self.trunc(RichTextPage, prompt, **page)
            page["status"] = CONTENT_STATUS_PUBLISHED
            page["in_menus"] = in_menus
            page, created = RichTextPage.objects.get_or_create(**page)
            if created and verbosity >= 1:
                print("Imported page: %s" % page)
            self.add_meta(page, tags, prompt, verbosity, old_url)
            parents.append({
                'old_id': old_id,
                'old_parent_id': old_parent_id,
                'page': page,
            })

        for obj in parents:
            if obj['old_parent_id']:
                for parent in parents:
                    if parent['old_id'] == obj['old_parent_id']:
                        obj['page'].parent = parent['page']
                        obj['page'].save()
                        break

    def add_meta(self, obj, tags, prompt, verbosity, old_url=None):
        """
        Adds tags and a redirect for the given obj, which is a blog
        post or a page.
        """
        for tag in tags:
            keyword = self.trunc(Keyword, prompt, title=tag)
            keyword, created = Keyword.objects.get_or_create_iexact(**keyword)
            obj.keywords.add(AssignedKeyword(keyword=keyword))
            if created and verbosity >= 1:
                print("Imported tag: %s" % keyword)
        if old_url is not None:
            old_path = urlparse(old_url).path
            if not old_path.strip("/"):
                return
            redirect = self.trunc(Redirect, prompt, old_path=old_path)
            redirect['site'] = Site.objects.get_current()
            redirect, created = Redirect.objects.get_or_create(**redirect)
            redirect.new_path = obj.get_absolute_url()
            redirect.save()
            if created and verbosity >= 1:
                print("Created redirect for: %s" % old_url)

    def handle_import(self, options):
        """
        Should be overridden by subclasses - performs the conversion from
        the originating data source into the lists of posts and comments
        ready for processing.
        """
        raise NotImplementedError
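A minimal sketch of how a platform-specific importer would subclass this base
class: only handle_import needs to be overridden. The command body and the
load_entries() helper are hypothetical, not part of Mezzanine:

class Command(BaseImporterCommand):
    help = "Import blog posts from a hypothetical export file."

    def handle_import(self, options):
        for entry in load_entries(options):    # hypothetical data source
            post = self.add_post(title=entry["title"],
                                 content=entry["body"],
                                 pub_date=entry["date"],
                                 tags=entry.get("tags"),
                                 old_url=entry.get("url"))
            for c in entry.get("comments", []):
                self.add_comment(post=post, name=c["author"],
                                 email=c["email"], body=c["text"],
                                 pub_date=c["date"])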
Example #60
class Opts(object):
    """A namespace class for individual options we'll build parsers from."""

    append = optparse.make_option(
        '-a',
        '--append',
        action='store_true',
        help="Append coverage data to .coverage, otherwise it is started "
        "clean with each run.")
    branch = optparse.make_option(
        '',
        '--branch',
        action='store_true',
        help="Measure branch coverage in addition to statement coverage.")
    CONCURRENCY_CHOICES = [
        "thread",
        "gevent",
        "greenlet",
        "eventlet",
        "multiprocessing",
    ]
    concurrency = optparse.make_option(
        '',
        '--concurrency',
        action='store',
        metavar="LIB",
        choices=CONCURRENCY_CHOICES,
        help="Properly measure code using a concurrency library. "
        "Valid values are: %s." % ", ".join(CONCURRENCY_CHOICES))
    debug = optparse.make_option('',
                                 '--debug',
                                 action='store',
                                 metavar="OPTS",
                                 help="Debug options, separated by commas")
    directory = optparse.make_option('-d',
                                     '--directory',
                                     action='store',
                                     metavar="DIR",
                                     help="Write the output files to DIR.")
    fail_under = optparse.make_option(
        '',
        '--fail-under',
        action='store',
        metavar="MIN",
        type="int",
        help="Exit with a status of 2 if the total coverage is less than MIN.")
    help = optparse.make_option('-h',
                                '--help',
                                action='store_true',
                                help="Get help on this command.")
    ignore_errors = optparse.make_option(
        '-i',
        '--ignore-errors',
        action='store_true',
        help="Ignore errors while reading source files.")
    include = optparse.make_option(
        '',
        '--include',
        action='store',
        metavar="PAT1,PAT2,...",
        help="Include only files whose paths match one of these patterns. "
        "Accepts shell-style wildcards, which must be quoted.")
    pylib = optparse.make_option(
        '-L',
        '--pylib',
        action='store_true',
        help="Measure coverage even inside the Python installed library, "
        "which isn't done by default.")
    show_missing = optparse.make_option(
        '-m',
        '--show-missing',
        action='store_true',
        help="Show line numbers of statements in each module that weren't "
        "executed.")
    skip_covered = optparse.make_option('--skip-covered',
                                        action='store_true',
                                        help="Skip files with 100% coverage.")
    omit = optparse.make_option(
        '',
        '--omit',
        action='store',
        metavar="PAT1,PAT2,...",
        help="Omit files whose paths match one of these patterns. "
        "Accepts shell-style wildcards, which must be quoted.")
    output_xml = optparse.make_option(
        '-o',
        '',
        action='store',
        dest="outfile",
        metavar="OUTFILE",
        help="Write the XML report to this file. Defaults to 'coverage.xml'")
    parallel_mode = optparse.make_option(
        '-p',
        '--parallel-mode',
        action='store_true',
        help="Append the machine name, process id and random number to the "
        ".coverage data file name to simplify collecting data from "
        "many processes.")
    module = optparse.make_option(
        '-m',
        '--module',
        action='store_true',
        help="<pyfile> is an importable Python module, not a script path, "
        "to be run as 'python -m' would run it.")
    rcfile = optparse.make_option(
        '',
        '--rcfile',
        action='store',
        help="Specify configuration file.  Defaults to '.coveragerc'")
    source = optparse.make_option(
        '',
        '--source',
        action='store',
        metavar="SRC1,SRC2,...",
        help="A list of packages or directories of code to be measured.")
    timid = optparse.make_option(
        '',
        '--timid',
        action='store_true',
        help="Use a simpler but slower trace method.  Try this if you get "
        "seemingly impossible results!")
    title = optparse.make_option(
        '',
        '--title',
        action='store',
        metavar="TITLE",
        help="A text string to use as the title on the HTML.")
    version = optparse.make_option(
        '',
        '--version',
        action='store_true',
        help="Display version information and exit.")