Example #1
def main():

    parser = OptionParser(usage="Usage: %prog")
    parser.parse_args()
    common.read_config(None)

    metadata.read_metadata(xref=True)
Example #2
def main():

    parser = ArgumentParser(usage="%(prog)s")
    common.setup_global_opts(parser)
    parser.parse_args()
    common.read_config(None)

    metadata.read_metadata(xref=True)
Example #3
def main():

    global config, options

    # Parse command line...
    parser = OptionParser()
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-p", "--package", default=None,
                      help="Process only the specified package")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    # Get all apps...
    apps = metadata.read_metadata(package=options.package, xref=False)

    if len(apps) == 0 and options.package:
        print "No such package"
        sys.exit(1)

    for app in apps:
        print "Writing " + app['id']
        metadata.write_metadata(os.path.join('metadata', app['id']) + '.txt', app)

    print "Finished."
Example #4
def main():

    global config, options

    # Parse command line...
    parser = OptionParser()
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    if len(args) != 1:
        print "Specify a single command"
        sys.exit(1)

    if args[0] != 'init' and args[0] != 'update':
        print "The only commands currently supported are 'init' and 'update'"
        sys.exit(1)

    serverwebroot = config['serverwebroot'].rstrip('/').replace('//', '/')
    host, fdroiddir = serverwebroot.split(':')
    serverrepobase = os.path.basename(fdroiddir)
    standardwebroot = not config.get('nonstandardwebroot', False)
    if serverrepobase != 'fdroid' and standardwebroot:
        print('ERROR: serverwebroot does not end with "fdroid", '
              + 'perhaps you meant one of these:\n\t'
              + serverwebroot.rstrip('/') + '/fdroid\n\t'
              # str.rstrip strips a character set, not a suffix, so slice
              # off the trailing path component instead
              + serverwebroot.rstrip('/')[:-len(serverrepobase)] + 'fdroid')
        sys.exit(1)

    repodirs = ['repo']
    if config['archive_older'] != 0:
        repodirs.append('archive')

    for repodir in repodirs:
        if args[0] == 'init':
            if subprocess.call(['ssh', '-v', host,
                                'mkdir -p', fdroiddir + '/' + repodir]) != 0:
                sys.exit(1)
        elif args[0] == 'update':
            index = os.path.join(repodir, 'index.xml')
            indexjar = os.path.join(repodir, 'index.jar')
            if subprocess.call(['rsync', '-u', '-v', '-r', '--delete',
                                '--exclude', index, '--exclude', indexjar,
                                repodir, config['serverwebroot']]) != 0:
                sys.exit(1)
            if subprocess.call(['rsync', '-u', '-v', '-r', '--delete',
                                index,
                                config['serverwebroot'] + '/' + repodir]) != 0:
                sys.exit(1)
            if subprocess.call(['rsync', '-u', '-v', '-r', '--delete',
                                indexjar,
                                config['serverwebroot'] + '/' + repodir]) != 0:
                sys.exit(1)

    sys.exit(0)
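Note the upload order in the 'update' branch: the bulk of the repo is rsynced first with index.xml and index.jar excluded, and the two index files are pushed only afterwards, presumably so that a client never fetches an index referring to packages that have not finished uploading.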
Example #5
def is_admin(nick, log=logger.log):
    try:
        adminlist = common.read_config('admins.json')
    except Exception as e:
        log('error', e)
        return False
    if not isinstance(adminlist, list):
        log('error', 'admins.json should contain a list')
        return False
    return nick in adminlist
Example #6
def main():
    settings = common.read_config('config.json')
    if not valid_settings(settings):
        exit()

    state = statekeeper.StateKeeper()

    while True:
        if irc.run(settings, state) == 'quit':
            break
        sleep(settings['irc']['reconnect_delay'])
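Examples #5 and #6 treat common.read_config as a plain JSON loader that raises on a missing or malformed file. A minimal sketch consistent with those call sites (the real common module may well differ):

import json

def read_config(path):
    # raise on a missing or malformed file; the callers above either
    # catch the exception or deliberately crash on bad config
    with open(path) as f:
        return json.load(f)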
Example #7
def main():

    global config, options

    anywarns = False

    # Parse command line...
    parser = ArgumentParser(usage="%(prog)s [options] [APPID [APPID ...]]")
    common.setup_global_opts(parser)
    parser.add_argument("-f", "--format", action="store_true", default=False,
                        help="Also warn about formatting issues, like rewritemeta -l")
    parser.add_argument("appid", nargs='*', help="app-id in the form APPID")
    options = parser.parse_args()

    config = common.read_config(options)

    # Get all apps...
    allapps = metadata.read_metadata(xref=True)
    apps = common.read_app_args(options.appid, allapps, False)

    for appid, app in apps.iteritems():
        if app.Disabled:
            continue

        warns = []

        for check_func in [
                check_regexes,
                check_ucm_tags,
                check_char_limits,
                check_old_links,
                check_checkupdates_ran,
                check_useless_fields,
                check_empty_fields,
                check_categories,
                check_duplicates,
                check_mediawiki_links,
                check_bulleted_lists,
                check_builds,
                ]:
            warns += check_func(app)

        if options.format:
            if not rewritemeta.proper_format(app):
                warns.append("Run rewritemeta to fix formatting")

        if warns:
            anywarns = True
            for warn in warns:
                print("%s: %s" % (appid, warn))

    if anywarns:
        sys.exit(1)
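Each check_* function in the list above takes the app object and returns a list of warning strings, so a new lint check can be added simply by appending another function to the list.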
Example #8
def main():

    global config, options

    # Parse command line...
    parser = ArgumentParser(usage="%(prog)s [options] [APPID [APPID ...]]")
    common.setup_global_opts(parser)
    parser.add_argument("-l", "--list", action="store_true", default=False,
                        help="List files that would be reformatted")
    parser.add_argument("-t", "--to", default=None,
                        help="Rewrite to a specific format")
    parser.add_argument("appid", nargs='*', help="app-id in the form APPID")
    options = parser.parse_args()

    config = common.read_config(options)

    # Get all apps...
    allapps = metadata.read_metadata(xref=True)
    apps = common.read_app_args(options.appid, allapps, False)

    if options.list and options.to is not None:
        parser.error("Cannot use --list and --to at the same time")

    supported = ['txt', 'yaml']

    if options.to is not None and options.to not in supported:
        parser.error("Must give a valid format to --to")

    for appid, app in apps.iteritems():
        base, ext = common.get_extension(app.metadatapath)
        if not options.to and ext not in supported:
            logging.info("Ignoring %s file at '%s'" % (ext, app.metadatapath))
            continue

        to_ext = ext
        if options.to is not None:
            to_ext = options.to

        if options.list:
            if not proper_format(app):
                print(app.metadatapath)
            continue

        with open(base + '.' + to_ext, 'w') as f:
            metadata.write_metadata(to_ext, f, app)

        if ext != to_ext:
            os.remove(app.metadatapath)

    logging.debug("Finished.")
Example #9
def check_domains(domain_base):
	import os.path

	app_config = common.read_config()

	extensions = read_extensions(common.get_full_path(app_config['ExtensionsFile']))

	domains_to_check = set()

	for e in extensions:
		domains_to_check.add(domain_base+'.'+e)

	domains_info = api_check_domains(domains_to_check, app_config['RrpproxyLogin'], app_config['RrpproxyPw'])

	write_csv(domains_info, domain_base, os.path.join(app_config['OutputPath'], domain_base+'.csv'))
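read_extensions is not shown in this excerpt; a minimal sketch consistent with the call site, assuming a plain text file with one extension per line:

def read_extensions(path):
	# skip blank lines and surrounding whitespace (assumed file format)
	with open(path) as f:
		return [line.strip() for line in f if line.strip()]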
Example #10
def main():

    global config, options

    # Parse command line...
    parser = ArgumentParser(usage="%(prog)s [options]")
    common.setup_global_opts(parser)
    options = parser.parse_args()

    config = common.read_config(options)

    if 'jarsigner' not in config:
        logging.critical('Java jarsigner not found! Install in standard location or set java_paths!')
        sys.exit(1)

    repodirs = ['repo']
    if config['archive_older'] != 0:
        repodirs.append('archive')

    signed = 0
    for output_dir in repodirs:
        if not os.path.isdir(output_dir):
            logging.error("Missing output directory '" + output_dir + "'")
            sys.exit(1)

        unsigned = os.path.join(output_dir, 'index_unsigned.jar')
        if os.path.exists(unsigned):

            args = [config['jarsigner'], '-keystore', config['keystore'],
                    '-storepass:file', config['keystorepassfile'],
                    '-digestalg', 'SHA1', '-sigalg', 'SHA1withRSA',
                    unsigned, config['repo_keyalias']]
            if config['keystore'] == 'NONE':
                args += config['smartcardoptions']
            else:  # smartcards never use -keypass
                args += ['-keypass:file', config['keypassfile']]
            p = FDroidPopen(args)
            if p.returncode != 0:
                logging.critical("Failed to sign index")
                sys.exit(1)
            os.rename(unsigned, os.path.join(output_dir, 'index.jar'))
            logging.info('Signed index in ' + output_dir)
            signed += 1

    if signed == 0:
        logging.info("Nothing to do")
Example #11
	def browser(self):
		import sys, subprocess, os.path, os

		if sys.platform == 'darwin':
			def openFolder(path):
				subprocess.call(['open', path])
		elif sys.platform.startswith('linux'):
			def openFolder(path):
				subprocess.call(['gnome-open', path])
		elif sys.platform == 'win32':
			def openFolder(path):
				subprocess.call(['explorer', path])
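		# note: on any platform other than these three, openFolder is never
		# defined and the call below would raise NameError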

		app_config = common.read_config()

		output_path = common.get_full_path(app_config['OutputPath'])
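		# the third positional argument is exist_ok=True (Python 3.2+),
		# so an already existing directory is not an error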
		os.makedirs(output_path, 0o755, True)
		openFolder(output_path)
Example #12
def main():
    startuptime = time()
    # Crash like hell if the config is crap!
    settings = common.read_config('config')
    # Kind of like state
    message = ''
    while True:
        result = irc.run(message, settings, startuptime)
        message = ''
        if result == 'reconnect':
            continue
        elif result == 'restart':
            try:
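                # reload() is the Python 2 built-in; on Python 3 use importlib.reload(irc)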
                reload(irc)
            except Exception as e:
                message = str(e)
            continue
        else:
            break
Example #13
def main():

    global config, options

    # Parse command line...
    parser = OptionParser(usage="Usage: %prog [options]")
    parser.add_option(
        "-v", "--verbose", action="store_true", default=False, help="Spew out even more information than normal"
    )
    parser.add_option(
        "-q", "--quiet", action="store_true", default=False, help="Restrict output to warnings and errors"
    )
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    repodirs = ["repo"]
    if config["archive_older"] != 0:
        repodirs.append("archive")

    for output_dir in repodirs:
        if not os.path.isdir(output_dir):
            logging.error("Missing output directory '" + output_dir + "'")
            sys.exit(1)

        # Process any apks that are waiting to be signed...
        for apkfile in sorted(glob.glob(os.path.join(output_dir, "*.apk"))):

            apkfilename = os.path.basename(apkfile)
            sigfilename = apkfilename + ".asc"
            sigpath = os.path.join(output_dir, sigfilename)

            if not os.path.exists(sigpath):
                gpgargs = ["gpg", "-a", "--output", sigpath, "--detach-sig"]
                if "gpghome" in config:
                    gpgargs.extend(["--homedir", config["gpghome"]])
                gpgargs.append(os.path.join(output_dir, apkfilename))
                p = FDroidPopen(gpgargs)
                if p.returncode != 0:
                    logging.error("Signing failed.")
                    sys.exit(1)

                logging.info("Signed " + apkfilename)
Example #14
def main():

    global config, options

    # Parse command line...
    parser = ArgumentParser(usage="%(prog)s [options]")
    common.setup_global_opts(parser)
    options = parser.parse_args()

    config = common.read_config(options)

    repodirs = ['repo']
    if config['archive_older'] != 0:
        repodirs.append('archive')

    for output_dir in repodirs:
        if not os.path.isdir(output_dir):
            logging.error("Missing output directory '" + output_dir + "'")
            sys.exit(1)

        # Process any apks that are waiting to be signed...
        for apkfile in sorted(glob.glob(os.path.join(output_dir, '*.apk'))):

            apkfilename = os.path.basename(apkfile)
            sigfilename = apkfilename + ".asc"
            sigpath = os.path.join(output_dir, sigfilename)

            if not os.path.exists(sigpath):
                gpgargs = ['gpg', '-a',
                           '--output', sigpath,
                           '--detach-sig']
                if 'gpghome' in config:
                    gpgargs.extend(['--homedir', config['gpghome']])
                if 'gpgkey' in config:
                    gpgargs.extend(['--local-user', config['gpgkey']])
                gpgargs.append(os.path.join(output_dir, apkfilename))
                p = FDroidPopen(gpgargs)
                if p.returncode != 0:
                    logging.error("Signing failed.")
                    sys.exit(1)

                logging.info('Signed ' + apkfilename)
Example #15
def main():

    global config, options

    # Parse command line...
    parser = ArgumentParser(usage="%(prog)s [options]")
    common.setup_global_opts(parser)
    options = parser.parse_args()

    config = common.read_config(options)

    repodirs = ["repo"]
    if config["archive_older"] != 0:
        repodirs.append("archive")

    for output_dir in repodirs:
        if not os.path.isdir(output_dir):
            logging.error("Missing output directory '" + output_dir + "'")
            sys.exit(1)

        # Process any apks that are waiting to be signed...
        for apkfile in sorted(glob.glob(os.path.join(output_dir, "*.apk"))):

            apkfilename = os.path.basename(apkfile)
            sigfilename = apkfilename + ".asc"
            sigpath = os.path.join(output_dir, sigfilename)

            if not os.path.exists(sigpath):
                gpgargs = ["gpg", "-a", "--output", sigpath, "--detach-sig"]
                if "gpghome" in config:
                    gpgargs.extend(["--homedir", config["gpghome"]])
                gpgargs.append(os.path.join(output_dir, apkfilename))
                p = FDroidPopen(gpgargs)
                if p.returncode != 0:
                    logging.error("Signing failed.")
                    sys.exit(1)

                logging.info("Signed " + apkfilename)
Example #16
def main():

    global config, options

    # Parse command line...
    parser = OptionParser(usage="Usage: %prog [options] [APPID [APPID ...]]")
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-q", "--quiet", action="store_true", default=False,
                      help="Restrict output to warnings and errors")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    # Get all apps...
    allapps = metadata.read_metadata(xref=True)
    apps = common.read_app_args(args, allapps, False)

    for appid, app in apps.iteritems():
        logging.info("Writing " + appid)
        metadata.write_metadata(os.path.join('metadata', appid) + '.txt', app)

    logging.info("Finished.")
Example #17
def main():
    # parse options
    (options, args) = parse_options()

    if os.isatty(sys.stdin.fileno()):
        raise RuntimeError('Need configuration in stdin.')
    config = common.read_config(sys.stdin)
    conn = common.connect(config.s3)
    bucket = None

    try:
        # setup
        real_stdout = sys.stdout
        sys.stdout = sys.stderr

        # verify all required config items are present
        if 'roundtrip' not in config:
            raise RuntimeError('roundtrip section not found in config')
        for item in ['readers', 'writers', 'duration', 'files', 'bucket']:
            if item not in config.roundtrip:
                raise RuntimeError(
                    "Missing roundtrip config item: {item}".format(item=item))
        for item in ['num', 'size', 'stddev']:
            if item not in config.roundtrip.files:
                raise RuntimeError(
                    "Missing roundtrip config item: files.{item}".format(
                        item=item))

        seeds = dict(config.roundtrip.get('random_seed', {}))
        seeds.setdefault('main', random.randrange(2**32))

        rand = random.Random(seeds['main'])

        for name in ['names', 'contents', 'writer', 'reader']:
            seeds.setdefault(name, rand.randrange(2**32))

        print('Using random seeds: {seeds}'.format(seeds=seeds))

        # setup bucket and other objects
        bucket_name = common.choose_bucket_prefix(config.roundtrip.bucket,
                                                  max_len=30)
        bucket = conn.create_bucket(bucket_name)
        print "Created bucket: {name}".format(name=bucket.name)
        objnames = realistic.names(
            mean=15,
            stddev=4,
            seed=seeds['names'],
        )
        objnames = itertools.islice(objnames, config.roundtrip.files.num)
        objnames = list(objnames)
        files = realistic.files(
            mean=1024 * config.roundtrip.files.size,
            stddev=1024 * config.roundtrip.files.stddev,
            seed=seeds['contents'],
        )
        q = gevent.queue.Queue()

        logger_g = gevent.spawn_link_exception(yaml.safe_dump_all,
                                               q,
                                               stream=real_stdout)

        print "Writing {num} objects with {w} workers...".format(
            num=config.roundtrip.files.num,
            w=config.roundtrip.writers,
        )
        pool = gevent.pool.Pool(size=config.roundtrip.writers)
        start = time.time()
        for objname in objnames:
            fp = next(files)
            pool.spawn_link_exception(
                writer,
                bucket=bucket,
                objname=objname,
                fp=fp,
                queue=q,
            )
        pool.join()
        stop = time.time()
        elapsed = stop - start
        q.put(
            dict(
                type='write_done',
                duration=int(round(elapsed * NANOSECOND)),
            ))

        print "Reading {num} objects with {w} workers...".format(
            num=config.roundtrip.files.num,
            w=config.roundtrip.readers,
        )
        # avoid accessing them in the same order as the writing
        rand.shuffle(objnames)
        pool = gevent.pool.Pool(size=config.roundtrip.readers)
        start = time.time()
        for objname in objnames:
            pool.spawn_link_exception(
                reader,
                bucket=bucket,
                objname=objname,
                queue=q,
            )
        pool.join()
        stop = time.time()
        elapsed = stop - start
        q.put(
            dict(
                type='read_done',
                duration=int(round(elapsed * NANOSECOND)),
            ))

        q.put(StopIteration)
        logger_g.get()

    finally:
        # cleanup
        if options.cleanup:
            if bucket is not None:
                common.nuke_bucket(bucket)
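The write_done and read_done records above report duration as int(round(elapsed * NANOSECOND)); NANOSECOND is presumably 10**9, so the reported values are in nanoseconds.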
Example #18
def main():
    # parse options
    (options, args) = parse_options()

    if os.isatty(sys.stdin.fileno()):
        raise RuntimeError('Need configuration in stdin.')
    config = common.read_config(sys.stdin)
    conn = common.connect(config.s3)
    bucket = None

    try:
        # setup
        real_stdout = sys.stdout
        sys.stdout = sys.stderr

        # verify all required config items are present
        if 'readwrite' not in config:
            raise RuntimeError('readwrite section not found in config')
        for item in ['readers', 'writers', 'duration', 'files', 'bucket']:
            if item not in config.readwrite:
                raise RuntimeError("Missing readwrite config item: {item}".format(item=item))
        for item in ['num', 'size', 'stddev']:
            if item not in config.readwrite.files:
                raise RuntimeError("Missing readwrite config item: files.{item}".format(item=item))

        seeds = dict(config.readwrite.get('random_seed', {}))
        seeds.setdefault('main', random.randrange(2**32))

        rand = random.Random(seeds['main'])

        for name in ['names', 'contents', 'writer', 'reader']:
            seeds.setdefault(name, rand.randrange(2**32))

        print('Using random seeds: {seeds}'.format(seeds=seeds))

        # setup bucket and other objects
        bucket_name = common.choose_bucket_prefix(config.readwrite.bucket, max_len=30)
        bucket = conn.create_bucket(bucket_name)
        print "Created bucket: {name}".format(name=bucket.name)
        file_names = realistic.names(
            mean=15,
            stddev=4,
            seed=seeds['names'],
            )
        file_names = itertools.islice(file_names, config.readwrite.files.num)
        file_names = list(file_names)
        files = realistic.files2(
            mean=1024 * config.readwrite.files.size,
            stddev=1024 * config.readwrite.files.stddev,
            seed=seeds['contents'],
            )
        q = gevent.queue.Queue()

        # warmup - get initial set of files uploaded
        print "Uploading initial set of {num} files".format(num=config.readwrite.files.num)
        warmup_pool = gevent.pool.Pool(size=100)
        for file_name in file_names:
            fp = next(files)
            warmup_pool.spawn_link_exception(
                write_file,
                bucket=bucket,
                file_name=file_name,
                fp=fp,
                )
        warmup_pool.join()

        # main work
        print "Starting main worker loop."
        print "Using file size: {size} +- {stddev}".format(size=config.readwrite.files.size, stddev=config.readwrite.files.stddev)
        print "Spawning {w} writers and {r} readers...".format(w=config.readwrite.writers, r=config.readwrite.readers)
        group = gevent.pool.Group()
        rand_writer = random.Random(seeds['writer'])
        for x in xrange(config.readwrite.writers):
            this_rand = random.Random(rand_writer.randrange(2**32))
            group.spawn_link_exception(
                writer,
                bucket=bucket,
                worker_id=x,
                file_names=file_names,
                files=files,
                queue=q,
                rand=this_rand,
                )
        rand_reader = random.Random(seeds['reader'])
        for x in xrange(config.readwrite.readers):
            this_rand = random.Random(rand_reader.randrange(2**32))
            group.spawn_link_exception(
                reader,
                bucket=bucket,
                worker_id=x,
                file_names=file_names,
                queue=q,
                rand=this_rand,
                )
        def stop():
            group.kill(block=True)
            q.put(StopIteration)
        gevent.spawn_later(config.readwrite.duration, stop)

        yaml.safe_dump_all(q, stream=real_stdout)

    finally:
        # cleanup
        if options.cleanup:
            if bucket is not None:
                common.nuke_bucket(bucket)
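The nested stop() function bounds the benchmark's run time: gevent.spawn_later arms a timer for config.readwrite.duration seconds, after which the whole worker group is killed and StopIteration is pushed into the queue so that the yaml.safe_dump_all consumer terminates cleanly.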
Example #19
def main():

    global config, options

    # Parse command line...
    parser = OptionParser(usage="Usage: %prog [options] "
                          "[APPID[:VERCODE] [APPID[:VERCODE] ...]]")
    parser.add_option("-v",
                      "--verbose",
                      action="store_true",
                      default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-q",
                      "--quiet",
                      action="store_true",
                      default=False,
                      help="Restrict output to warnings and errors")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    log_dir = 'logs'
    if not os.path.isdir(log_dir):
        logging.info("Creating log directory")
        os.makedirs(log_dir)

    tmp_dir = 'tmp'
    if not os.path.isdir(tmp_dir):
        logging.info("Creating temporary directory")
        os.makedirs(tmp_dir)

    output_dir = 'repo'
    if not os.path.isdir(output_dir):
        logging.info("Creating output directory")
        os.makedirs(output_dir)

    unsigned_dir = 'unsigned'
    if not os.path.isdir(unsigned_dir):
        logging.warning("No unsigned directory - nothing to do")
        sys.exit(1)

    for f in [
            config['keystorepassfile'], config['keystore'],
            config['keypassfile']
    ]:
        if not os.path.exists(f):
            logging.error("Config error - missing '{0}'".format(f))
            sys.exit(1)

    # It was suggested at
    #    https://dev.guardianproject.info/projects/bazaar/wiki/FDroid_Audit
    # that a package could be crafted, such that it would use the same signing
    # key as an existing app. While it may be theoretically possible for such a
    # colliding package ID to be generated, it seems virtually impossible that
    # the colliding ID would be something that would be a) a valid package ID,
    # and b) a sane-looking ID that would make its way into the repo.
    # Nonetheless, to be sure, before publishing we check that there are no
    # collisions, and refuse to do any publishing if that's the case...
    allapps = metadata.read_metadata()
    vercodes = common.read_pkg_args(args, True)
    allaliases = []
    for appid in allapps:
        m = md5.new()
        m.update(appid)
        keyalias = m.hexdigest()[:8]
        if keyalias in allaliases:
            logging.error("There is a keyalias collision - publishing halted")
            sys.exit(1)
        allaliases.append(keyalias)
    logging.info("{0} apps, {0} key aliases".format(len(allapps),
                                                    len(allaliases)))

    # Process any apks that are waiting to be signed...
    for apkfile in sorted(glob.glob(os.path.join(unsigned_dir, '*.apk'))):

        appid, vercode = common.apknameinfo(apkfile)
        apkfilename = os.path.basename(apkfile)
        if vercodes and appid not in vercodes:
            continue
        if appid in vercodes and vercodes[appid]:
            if vercode not in vercodes[appid]:
                continue
        logging.info("Processing " + apkfile)

        # Figure out the key alias name we'll use. Only the first 8
        # characters are significant, so we'll use the first 8 from
        # the MD5 of the app's ID and hope there are no collisions.
        # If a collision does occur later, we're going to have to
        # come up with a new algorithm, AND rename all existing keys
        # in the keystore!
        if appid in config['keyaliases']:
            # For this particular app, the key alias is overridden...
            keyalias = config['keyaliases'][appid]
            if keyalias.startswith('@'):
                m = md5.new()
                m.update(keyalias[1:])
                keyalias = m.hexdigest()[:8]
        else:
            m = md5.new()
            m.update(appid)
            keyalias = m.hexdigest()[:8]
        logging.info("Key alias: " + keyalias)

        # See if we already have a key for this application, and
        # if not generate one...
        p = FDroidPopen([
            'keytool', '-list', '-alias', keyalias, '-keystore',
            config['keystore'], '-storepass:file', config['keystorepassfile']
        ])
        if p.returncode != 0:
            logging.info("Key does not exist - generating...")
            p = FDroidPopen([
                'keytool', '-genkey', '-keystore', config['keystore'],
                '-alias', keyalias, '-keyalg', 'RSA', '-keysize', '2048',
                '-validity', '10000', '-storepass:file',
                config['keystorepassfile'], '-keypass:file',
                config['keypassfile'], '-dname', config['keydname']
            ])
            # TODO keypass should be sent via stdin
            if p.returncode != 0:
                raise BuildException("Failed to generate key")

        # Sign the application...
        p = FDroidPopen([
            'jarsigner', '-keystore', config['keystore'], '-storepass:file',
            config['keystorepassfile'], '-keypass:file', config['keypassfile'],
            '-sigalg', 'MD5withRSA', '-digestalg', 'SHA1', apkfile, keyalias
        ])
        # TODO keypass should be sent via stdin
        if p.returncode != 0:
            raise BuildException("Failed to sign application")

        # Zipalign it...
        p = FDroidPopen([
            config['zipalign'], '-v', '4', apkfile,
            os.path.join(output_dir, apkfilename)
        ])
        if p.returncode != 0:
            raise BuildException("Failed to align application")
        os.remove(apkfile)

        # Move the source tarball into the output directory...
        tarfilename = apkfilename[:-4] + '_src.tar.gz'
        tarfile = os.path.join(unsigned_dir, tarfilename)
        if os.path.exists(tarfile):
            shutil.move(tarfile, os.path.join(output_dir, tarfilename))

        logging.info('Published ' + apkfilename)
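The 8-character key alias derivation appears twice in this example; as a standalone helper it might look like the following sketch (using hashlib instead of the long-deprecated md5 module):

import hashlib

def key_alias(appid):
    # only the first 8 characters of a keystore alias are significant
    return hashlib.md5(appid.encode()).hexdigest()[:8]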
Example #20
def main(config_dir, home_dir, output_dir):
    config = read_config(config_dir)
    if 'channel_id' not in config:
        write_status(output_dir, False, 'No channel token given')
        return 1

    channel = config['channel_id']

    success, msg, client = new_client(config)

    if not success:
        write_status(output_dir, False, msg)
        return 1

    if 'deadlines' not in config:
        write_status(output_dir, True, 'No deadlines to report')
        return 1

    deadlines = config['deadlines']
    if not isinstance(deadlines, dict):
        write_status(output_dir, False, 'Invalid deadlines structure')
        return 1

    attachments = []
    present = datetime.datetime.now()
    for (name, info) in deadlines.items():
        if 'date' not in info:
            write_status(
                output_dir, False,
                'Date missing in entry {} under {}'.format(info, name))
            return 1

        date = None
        try:
            date = datetime.datetime.strptime(info['date'],
                                              '%Y-%m-%d %H:%M:%S')
        except Exception as e:
            write_status(output_dir, False,
                         'Could not parse date {}'.format(info['date']))
            return 1

        diff = date - present
        days_left = diff.days
        if days_left < 0:
            # elapsed, so forget it
            continue
        alert = days_left <= 7

        time_left_msg = '{} days, {:.2f} hours left'.format(
            diff.days, diff.seconds / 3600)
        fields = [build_field(value=time_left_msg)]
        if alert and 'ping' in info:
            pings = generate_ping_list(info['ping'])
            fields.append(build_field(value='Beware {}!'.format(pings)))
        attachments.append(
            build_attachment(title=name,
                             text='Deadline: {}'.format(info['date']),
                             fields=fields,
                             color='#fa0000' if alert else '#0fbf24'))

    if not attachments:
        write_status(output_dir, True, 'All deadlines elapsed')
        return 0

    success, _, report = post_message(
        client, channel,
        build_message(text='*Upcoming Deadlines*', attachments=attachments))
    write_status(output_dir, success, report)
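build_field, build_attachment and build_message are not shown; sketches consistent with these call sites and with Slack's legacy attachment format (an assumption) could be:

def build_field(title='', value='', short=True):
    # a single field inside a Slack legacy attachment (assumed shape)
    return {'title': title, 'value': value, 'short': short}

def build_attachment(title='', text='', fields=None, color='#0fbf24'):
    return {'title': title, 'text': text, 'fields': fields or [], 'color': color}

def build_message(text='', attachments=None):
    return {'text': text, 'attachments': attachments or []}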
Example #21
def main():
    # parse options
    (options, args) = parse_options()

    if os.isatty(sys.stdin.fileno()):
        raise RuntimeError('Need configuration in stdin.')
    config = common.read_config(sys.stdin)
    conn = common.connect(config.s3)
    bucket = None

    try:
        # setup
        real_stdout = sys.stdout
        sys.stdout = sys.stderr

        # verify all required config items are present
        if 'readwrite' not in config:
            raise RuntimeError('readwrite section not found in config')
        for item in ['readers', 'writers', 'duration', 'files', 'bucket']:
            if item not in config.readwrite:
                raise RuntimeError("Missing readwrite config item: {item}".format(item=item))
        for item in ['num', 'size', 'stddev']:
            if item not in config.readwrite.files:
                raise RuntimeError("Missing readwrite config item: files.{item}".format(item=item))

        seeds = dict(config.readwrite.get('random_seed', {}))
        seeds.setdefault('main', random.randrange(2**32))

        rand = random.Random(seeds['main'])

        for name in ['names', 'contents', 'writer', 'reader']:
            seeds.setdefault(name, rand.randrange(2**32))

        print('Using random seeds: {seeds}'.format(seeds=seeds))

        # setup bucket and other objects
        bucket_name = common.choose_bucket_prefix(config.readwrite.bucket, max_len=30)
        bucket = conn.create_bucket(bucket_name)
        print "Created bucket: {name}".format(name=bucket.name)

        # check flag for deterministic file name creation
        if not config.readwrite.get('deterministic_file_names'):
            print('Creating random file names')
            file_names = realistic.names(
                mean=15,
                stddev=4,
                seed=seeds['names'],
                )
            file_names = itertools.islice(file_names, config.readwrite.files.num)
            file_names = list(file_names)
        else:
            print('Creating file names that are deterministic')
            file_names = []
            for x in xrange(config.readwrite.files.num):
                file_names.append('test_file_{num}'.format(num=x))

        files = realistic.files2(
            mean=1024 * config.readwrite.files.size,
            stddev=1024 * config.readwrite.files.stddev,
            seed=seeds['contents'],
            )
        q = gevent.queue.Queue()

        # warmup - get initial set of files uploaded if there are any writers specified
        if config.readwrite.writers > 0:
            print "Uploading initial set of {num} files".format(num=config.readwrite.files.num)
            warmup_pool = gevent.pool.Pool(size=100)
            for file_name in file_names:
                fp = next(files)
                warmup_pool.spawn_link_exception(
                    write_file,
                    bucket=bucket,
                    file_name=file_name,
                    fp=fp,
                    )
            warmup_pool.join()

        # main work
        print "Starting main worker loop."
        print "Using file size: {size} +- {stddev}".format(size=config.readwrite.files.size, stddev=config.readwrite.files.stddev)
        print "Spawning {w} writers and {r} readers...".format(w=config.readwrite.writers, r=config.readwrite.readers)
        group = gevent.pool.Group()
        rand_writer = random.Random(seeds['writer'])

        # Don't create random files if deterministic_files_names is set and true
        if not config.readwrite.get('deterministic_file_names'):
            for x in xrange(config.readwrite.writers):
                this_rand = random.Random(rand_writer.randrange(2**32))
                group.spawn_link_exception(
                    writer,
                    bucket=bucket,
                    worker_id=x,
                    file_names=file_names,
                    files=files,
                    queue=q,
                    rand=this_rand,
                    )

        # Since the loop generating readers already uses config.readwrite.readers
        # and the file names are already generated (randomly or deterministically),
        # this loop needs no additional qualifiers. If zero readers are specified,
        # it will behave as expected (no data is read)
        rand_reader = random.Random(seeds['reader'])
        for x in xrange(config.readwrite.readers):
            this_rand = random.Random(rand_reader.randrange(2**32))
            group.spawn_link_exception(
                reader,
                bucket=bucket,
                worker_id=x,
                file_names=file_names,
                queue=q,
                rand=this_rand,
                )
        def stop():
            group.kill(block=True)
            q.put(StopIteration)
        gevent.spawn_later(config.readwrite.duration, stop)

        # wait for all the tests to finish
        group.join()
        print('post-join, queue size {size}'.format(size=q.qsize()))

        if q.qsize() > 0:
            for temp_dict in q:
                if 'error' in temp_dict:
                    raise Exception('exception:\n\t{msg}\n\t{trace}'.format(
                                    msg=temp_dict['error']['msg'],
                                    trace=temp_dict['error']['traceback'])
                                   )
                else:
                    yaml.safe_dump(temp_dict, stream=real_stdout)

    finally:
        # cleanup
        if options.cleanup:
            if bucket is not None:
                common.nuke_bucket(bucket)
Example #22
def main():

    global options, config

    # Parse command line...
    parser = OptionParser()
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-d", "--distinguished-name", default=None,
                      help="X.509 'Distiguished Name' used when generating keys")
    parser.add_option("--keystore", default=None,
                      help="Path to the keystore for the repo signing key")
    parser.add_option("--repo-keyalias", default=None,
                      help="Alias of the repo signing key in the keystore")
    (options, args) = parser.parse_args()

    # find root install prefix
    tmp = os.path.dirname(sys.argv[0])
    if os.path.basename(tmp) == 'bin':
        prefix = os.path.dirname(tmp)
        examplesdir = prefix + '/share/doc/fdroidserver/examples'
    else:
        # we're running straight out of the git repo
        prefix = tmp
        examplesdir = prefix

    fdroiddir = os.getcwd()

    if not os.path.exists('config.py') and not os.path.exists('repo'):
        # 'metadata' and 'tmp' are created in fdroid
        os.mkdir('repo')
        shutil.copy(os.path.join(examplesdir, 'fdroid-icon.png'), fdroiddir)
        shutil.copyfile(os.path.join(examplesdir, 'config.sample.py'), 'config.py')
        os.chmod('config.py', 0o0600)
    else:
        print('Looks like this is already an F-Droid repo, cowardly refusing to overwrite it...')
        sys.exit()

    # now that we have a local config.py, read configuration...
    config = common.read_config(options)

    # track down where the Android SDK is
    if os.path.isdir(config['sdk_path']):
        print('Using "' + config['sdk_path'] + '" for the Android SDK')
        sdk_path = config['sdk_path']
    elif 'ANDROID_HOME' in os.environ.keys():
        sdk_path = os.environ['ANDROID_HOME']
    else:
        default_sdk_path = '/opt/android-sdk'
        while True:
            s = raw_input('Enter the path to the Android SDK (' + default_sdk_path + '): ')
            if re.match(r'^\s*$', s):
                sdk_path = default_sdk_path
            else:
                sdk_path = s
            if os.path.isdir(os.path.join(sdk_path, 'build-tools')):
                break
            else:
                print('"' + s + '" does not contain the Android SDK! Try again...')
    if os.path.isdir(sdk_path):
        write_to_config('sdk_path', sdk_path)

    # try to find a working aapt, in all the recent possible paths
    build_tools = os.path.join(sdk_path, 'build-tools')
    aaptdirs = []
    aaptdirs.append(os.path.join(build_tools, config['build_tools']))
    aaptdirs.append(build_tools)
    for f in sorted(os.listdir(build_tools), reverse=True):
        if os.path.isdir(os.path.join(build_tools, f)):
            aaptdirs.append(os.path.join(build_tools, f))
    aapt = None
    for d in aaptdirs:
        if os.path.isfile(os.path.join(d, 'aapt')):
            aapt = os.path.join(d, 'aapt')
            break
    if aapt:  # guard: if no aapt was found, the original check raised NameError
        dirname = os.path.basename(os.path.dirname(aapt))
        if dirname == 'build-tools':
            # this is the old layout, before versioned build-tools
            write_to_config('build_tools', '')
        else:
            write_to_config('build_tools', dirname)

    # track down where the Android NDK is
    ndk_path = '/opt/android-ndk'
    if os.path.isdir(config['ndk_path']):
        ndk_path = config['ndk_path']
    elif 'ANDROID_NDK' in os.environ.keys():
        print('using ANDROID_NDK')
        ndk_path = os.environ['ANDROID_NDK']
    if os.path.isdir(ndk_path):
        write_to_config('ndk_path', ndk_path)
    # the NDK is optional so we don't prompt the user for it if its not found

    # find or generate the keystore for the repo signing key. First try the
    # path written in the default config.py.  Then check if the user has
    # specified a path from the command line, which will trump all others.
    # Otherwise, create ~/.local/share/fdroidserver and stick it in there.
    keystore = config['keystore']
    if options.keystore:
        if os.path.isfile(options.keystore):
            keystore = options.keystore
            write_to_config('keystore', keystore)
        else:
            print('"' + options.keystore + '" does not exist or is not a file!')
            sys.exit(1)
    if options.repo_keyalias:
        repo_keyalias = options.repo_keyalias
        write_to_config('repo_keyalias', repo_keyalias)
    if options.distinguished_name:
        keydname = options.distinguished_name
        write_to_config('keydname', keydname)
    if not os.path.isfile(keystore):
        # no existing or specified keystore, generate the whole thing
        keystoredir = os.path.join(os.getenv('HOME'),
                                   '.local', 'share', 'fdroidserver')
        if not os.path.exists(keystoredir):
            os.makedirs(keystoredir, mode=0o700)
        keystore = os.path.join(keystoredir, 'keystore.jks')
        write_to_config('keystore', keystore)
        password = genpassword()
        write_to_config('keystorepass', password)
        write_to_config('keypass', password)
        if not options.repo_keyalias:
            repo_keyalias = socket.getfqdn()
            write_to_config('repo_keyalias', repo_keyalias)
        if not options.distinguished_name:
            keydname = 'CN=' + repo_keyalias + ', OU=F-Droid'
            write_to_config('keydname', keydname)
        genkey(keystore, repo_keyalias, password, keydname)

    print('Built repo based in "' + fdroiddir + '"')
    print('with this config:')
    print('  Android SDK:\t\t\t' + sdk_path)
    print('  Android SDK Build Tools:\t' + os.path.dirname(aapt))
    print('  Android NDK (optional):\t' + ndk_path)
    print('  Keystore for signing key:\t' + keystore)
    print('\nTo complete the setup, add your APKs to "' +
          os.path.join(fdroiddir, 'repo') + '"' +
'''
then run "fdroid update -c; fdroid update".  You might also want to edit
"config.py" to set the URL, repo name, and more.  You should also set up
a signing key.

For more info: https://f-droid.org/manual/fdroid.html#Simple-Binary-Repository
and https://f-droid.org/manual/fdroid.html#Signing
''')
Example #23
def main():

    global options, config

    # Parse command line...
    parser = OptionParser(
        usage="Usage: %prog [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]")
    parser.add_option("-v",
                      "--verbose",
                      action="store_true",
                      default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-q",
                      "--quiet",
                      action="store_true",
                      default=False,
                      help="Restrict output to warnings and errors")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    tmp_dir = 'tmp'
    if not os.path.isdir(tmp_dir):
        logging.info("Creating temporary directory")
        os.makedirs(tmp_dir)

    unsigned_dir = 'unsigned'
    if not os.path.isdir(unsigned_dir):
        logging.error("No unsigned directory - nothing to do")
        sys.exit(0)

    verified = 0
    notverified = 0

    vercodes = common.read_pkg_args(args, True)

    for apkfile in sorted(glob.glob(os.path.join(unsigned_dir, '*.apk'))):

        apkfilename = os.path.basename(apkfile)
        appid, vercode = common.apknameinfo(apkfile)

        if vercodes and appid not in vercodes:
            continue
        if vercodes.get(appid) and vercode not in vercodes[appid]:
            continue

        try:

            logging.info("Processing " + apkfilename)

            remoteapk = os.path.join(tmp_dir, apkfilename)
            if os.path.exists(remoteapk):
                os.remove(remoteapk)
            url = 'https://f-droid.org/repo/' + apkfilename
            logging.info("...retrieving " + url)
            common.download_file(url, dldir=tmp_dir)

            compare_result = common.compare_apks(
                os.path.join(unsigned_dir, apkfilename), remoteapk, tmp_dir)
            if compare_result:
                raise FDroidException(compare_result)

            logging.info("...successfully verified")
            verified += 1

        except FDroidException as e:
            logging.info("...NOT verified - {0}".format(e))
            notverified += 1
Example #24
def main():

    global config, options, curid, count
    curid = None

    count = Counter()

    def warn(message):
        global curid, count
        if curid:
            print "%s:" % curid
            curid = None
            count['app'] += 1
        print('    %s' % message)
        count['warn'] += 1

    def pwarn(message):
        if options.pedantic:
            warn(message)

    # Parse command line...
    parser = OptionParser(usage="Usage: %prog [options] [APPID [APPID ...]]")
    parser.add_option("-v",
                      "--verbose",
                      action="store_true",
                      default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-q",
                      "--quiet",
                      action="store_true",
                      default=False,
                      help="Restrict output to warnings and errors")
    parser.add_option(
        "-p",
        "--pedantic",
        action="store_true",
        default=False,
        help="Show pedantic warnings that might give false positives")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    # Get all apps...
    allapps = metadata.read_metadata(xref=False)
    apps = common.read_app_args(args, allapps, False)

    for appid, app in apps.iteritems():
        curid = appid
        lastcommit = ''

        if app['Disabled']:
            continue

        for build in app['builds']:
            if build['commit'] and not build['disable']:
                lastcommit = build['commit']

        # Potentially incorrect UCM
        if (app['Update Check Mode'] == 'RepoManifest'
                and any(s in lastcommit for s in '.,_-/')):
            pwarn(
                "Last used commit '%s' looks like a tag, but Update Check Mode is '%s'"
                % (lastcommit, app['Update Check Mode']))

        # Summary size limit
        summ_chars = len(app['Summary'])
        if summ_chars > config['char_limits']['Summary']:
            warn("Summary of length %s is over the %i char limit" %
                 (summ_chars, config['char_limits']['Summary']))

        # Redundant info
        if app['Web Site'] and app['Source Code']:
            if app['Web Site'].lower() == app['Source Code'].lower():
                warn("Website '%s' is just the app's source code link" %
                     app['Web Site'])
                app['Web Site'] = ''

        name = app['Name'] or app['Auto Name']
        if app['Summary'] and name:
            if app['Summary'].lower() == name.lower():
                warn("Summary '%s' is just the app's name" % app['Summary'])

        if app['Summary'] and app['Description']:
            if app['Summary'].lower() == app['Description'][0].lower():
                warn("Description '%s' is just the app's summary" %
                     app['Summary'])

        # Description size limit
        desc_chars = sum(len(l) for l in app['Description'])
        if desc_chars > config['char_limits']['Description']:
            warn("Description of length %s is over the %i char limit" %
                 (desc_chars, config['char_limits']['Description']))

        # Regex checks in all kinds of fields
        for f in regex_warnings:
            for m, r in regex_warnings[f]:
                t = metadata.metafieldtype(f)
                if t == 'string':
                    if m.match(app[f]):
                        warn("%s '%s': %s" % (f, app[f], r))
                elif t == 'multiline':
                    for l in app[f]:
                        if m.match(l):
                            warn("%s at line '%s': %s" % (f, l, r))

        # Regex pedantic checks in all kinds of fields
        if options.pedantic:
            for f in regex_pedantic:
                for m, r in regex_pedantic[f]:
                    if m.match(app[f]):
                        warn("%s '%s': %s" % (f, app[f], r))

        # Build warnings
        for build in app['builds']:
            if build['disable']:
                continue
            for s in ['master', 'origin', 'HEAD', 'default', 'trunk']:
                if build['commit'] and build['commit'].startswith(s):
                    warn("Branch '%s' used as commit in build '%s'" %
                         (s, build['version']))
                for srclib in build['srclibs']:
                    ref = srclib.split('@')[1].split('/')[0]
                    if ref.startswith(s):
                        warn("Branch '%s' used as commit in srclib '%s'" %
                             (s, srclib))
            for s in [
                    'git clone', 'git svn clone', 'svn checkout', 'svn co',
                    'hg clone'
            ]:
                for flag in ['init', 'prebuild', 'build']:
                    if not build[flag]:
                        continue
                    if s in build[flag]:
                        # TODO: This should not be pedantic!
                        pwarn("'%s' used in %s '%s'" % (s, flag, build[flag]))

        if not curid:
            print('')

    logging.info("Found a total of %i warnings in %i apps." %
                 (count['warn'], count['app']))
Example #25
def main():
    # parse options
    (options, args) = parse_options()

    if os.isatty(sys.stdin.fileno()):
        raise RuntimeError('Need configuration in stdin.')
    config = common.read_config(sys.stdin)
    conn = common.connect(config.s3)
    bucket = None

    try:
        # setup
        real_stdout = sys.stdout
        sys.stdout = sys.stderr

        # verify all required config items are present
        if 'readwrite' not in config:
            raise RuntimeError('readwrite section not found in config')
        for item in ['readers', 'writers', 'duration', 'files', 'bucket']:
            if item not in config.readwrite:
                raise RuntimeError(
                    "Missing readwrite config item: {item}".format(item=item))
        for item in ['num', 'size', 'stddev']:
            if item not in config.readwrite.files:
                raise RuntimeError(
                    "Missing readwrite config item: files.{item}".format(
                        item=item))

        seeds = dict(config.readwrite.get('random_seed', {}))
        seeds.setdefault('main', random.randrange(2**32))

        rand = random.Random(seeds['main'])

        for name in ['names', 'contents', 'writer', 'reader']:
            seeds.setdefault(name, rand.randrange(2**32))

        print('Using random seeds: {seeds}'.format(seeds=seeds))

        # setup bucket and other objects
        bucket_name = common.choose_bucket_prefix(config.readwrite.bucket,
                                                  max_len=30)
        bucket = conn.create_bucket(bucket_name)
        print "Created bucket: {name}".format(name=bucket.name)

        # check flag for deterministic file name creation
        if not config.readwrite.get('deterministic_file_names'):
            print('Creating random file names')
            file_names = realistic.names(
                mean=15,
                stddev=4,
                seed=seeds['names'],
            )
            file_names = itertools.islice(file_names,
                                          config.readwrite.files.num)
            file_names = list(file_names)
        else:
            print('Creating file names that are deterministic')
            file_names = []
            for x in xrange(config.readwrite.files.num):
                file_names.append('test_file_{num}'.format(num=x))

        files = realistic.files2(
            mean=1024 * config.readwrite.files.size,
            stddev=1024 * config.readwrite.files.stddev,
            seed=seeds['contents'],
        )
        q = gevent.queue.Queue()

        # warmup - get initial set of files uploaded if there are any writers specified
        if config.readwrite.writers > 0:
            print "Uploading initial set of {num} files".format(
                num=config.readwrite.files.num)
            warmup_pool = gevent.pool.Pool(size=100)
            for file_name in file_names:
                fp = next(files)
                warmup_pool.spawn_link_exception(
                    write_file,
                    bucket=bucket,
                    file_name=file_name,
                    fp=fp,
                )
            warmup_pool.join()

        # main work
        print "Starting main worker loop."
        print "Using file size: {size} +- {stddev}".format(
            size=config.readwrite.files.size,
            stddev=config.readwrite.files.stddev)
        print "Spawning {w} writers and {r} readers...".format(
            w=config.readwrite.writers, r=config.readwrite.readers)
        group = gevent.pool.Group()
        rand_writer = random.Random(seeds['writer'])

        # Don't create random files if deterministic_files_names is set and true
        if not config.readwrite.get('deterministic_file_names'):
            for x in xrange(config.readwrite.writers):
                this_rand = random.Random(rand_writer.randrange(2**32))
                group.spawn_link_exception(
                    writer,
                    bucket=bucket,
                    worker_id=x,
                    file_names=file_names,
                    files=files,
                    queue=q,
                    rand=this_rand,
                )

        # Since the loop generating readers already uses config.readwrite.readers
        # and the file names are already generated (randomly or deterministically),
        # this loop needs no additional qualifiers. If zero readers are specified,
        # it will behave as expected (no data is read)
        rand_reader = random.Random(seeds['reader'])
        for x in xrange(config.readwrite.readers):
            this_rand = random.Random(rand_reader.randrange(2**32))
            group.spawn_link_exception(
                reader,
                bucket=bucket,
                worker_id=x,
                file_names=file_names,
                queue=q,
                rand=this_rand,
            )

        def stop():
            group.kill(block=True)
            q.put(StopIteration)

        gevent.spawn_later(config.readwrite.duration, stop)

        # wait for all the tests to finish
        group.join()
        print('post-join, queue size {size}'.format(size=q.qsize()))

        if q.qsize() > 0:
            for temp_dict in q:
                if 'error' in temp_dict:
                    raise Exception('exception:\n\t{msg}\n\t{trace}'.format(
                        msg=temp_dict['error']['msg'],
                        trace=temp_dict['error']['traceback']))
                else:
                    yaml.safe_dump(temp_dict, stream=real_stdout)

    finally:
        # cleanup
        if options.cleanup:
            if bucket is not None:
                common.nuke_bucket(bucket)
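The pattern above (a pool of reader/writer greenlets, a timed stop, and a
results queue drained after the join) can be reduced to a few lines. A
minimal, self-contained sketch, assuming only that gevent is installed;
the worker body is illustrative:

import gevent
import gevent.pool
import gevent.queue

def worker(worker_id, queue):
    # Each worker pushes a result dict onto the shared queue until the
    # group is killed when the duration expires.
    while True:
        queue.put({'worker': worker_id})
        gevent.sleep(0.1)

def run(duration=1.0, workers=3):
    q = gevent.queue.Queue()
    group = gevent.pool.Group()
    for x in range(workers):
        group.spawn(worker, x, q)

    def stop():
        group.kill(block=True)
        q.put(StopIteration)  # sentinel: ends iteration over the queue

    gevent.spawn_later(duration, stop)
    group.join()  # returns once stop() has killed the workers
    for result in q:  # drains until the StopIteration sentinel
        print(result)

if __name__ == '__main__':
    run()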
Example #26
0
def main(config_dir, home_dir, output_dir):
    info = DashboardInfo(home_dir)
    conf = read_config(config_dir)

    # delete old report so it doesn't hang around if we exit
    # without a new one
    if check_file_exists(output_dir, 'report.json'):
        subprocess.call(['rm', '-f', os.path.join(output_dir, 'report.json')])

    time_window = -1
    if 'time_window' in conf:
        time_window = int(conf['time_window'])
    pings = conf['notify'] if 'notify' in conf else []

    # map: exp -> [(fields w/ high SD, historic mean, SD, current)]
    exp_alerts = {}
    for exp in info.all_present_experiments():
        if not info.exp_active(exp):
            continue

        # not this subsystem's job to report on failures
        stage_statuses = info.exp_stage_statuses(exp)
        if 'run' not in stage_statuses or 'analysis' not in stage_statuses:
            continue
        if not stage_statuses['analysis']['success']:
            continue

        all_data = sort_data(info.exp_data_dir(exp))
        if len(all_data) <= 1:
            continue

        exp_alerts[exp] = []
        most_recent = all_data[-1]
        past_data = all_data[:-1]
        if time_window >= 1:
            past_data = [
                entry for entry in past_data
                if time_difference(most_recent, entry).days <= time_window
            ]

        field_values = traverse_fields(most_recent)
        for fields in itertools.product(*field_values):
            current_stat, _ = gather_stats([most_recent], fields)
            current = current_stat[0]
            past_stats, _ = gather_stats(past_data, fields)

            past_sd = np.std(past_stats)
            past_mean = np.mean(past_stats)
            if abs(current - past_mean) > past_sd:
                exp_alerts[exp].append((fields, past_mean, past_sd, current))

        if not exp_alerts[exp]:
            del exp_alerts[exp]

    if exp_alerts:
        report = {
            'title': 'High SD Alerts',
            'value': format_report(info, exp_alerts, pings)
        }
        write_json(output_dir, 'report.json', report)

    write_status(output_dir, True, 'success')
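The alert rule used above fits in one function: flag a field when the
newest value deviates from the historic mean by more than one standard
deviation. A standalone illustration, assuming only numpy:

import numpy as np

def high_sd_alert(past_values, current):
    # True when the current value is more than one SD from the past mean.
    return abs(current - np.mean(past_values)) > np.std(past_values)

print(high_sd_alert([10.0, 10.5, 9.8, 10.2], 13.0))  # True
print(high_sd_alert([10.0, 10.5, 9.8, 10.2], 10.1))  # False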
Example #27
0
def main():

    global config, options, curid, count
    curid = None

    count = Counter()

    def warn(message):
        global curid, count
        if curid:
            print "%s:" % curid
            curid = None
            count['app'] += 1
        print '    %s' % message
        count['warn'] += 1

    def pwarn(message):
        if options.pedantic:
            warn(message)

    # Parse command line...
    parser = OptionParser(usage="Usage: %prog [options] [APPID [APPID ...]]")
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-q", "--quiet", action="store_true", default=False,
                      help="Restrict output to warnings and errors")
    parser.add_option("-p", "--pedantic", action="store_true", default=False,
                      help="Show pedantic warnings that might give false positives")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    # Get all apps...
    allapps = metadata.read_metadata(xref=False)
    apps = common.read_app_args(args, allapps, False)

    for appid, app in apps.iteritems():
        if app['Disabled']:
            continue

        curid = appid
        count['app_total'] += 1

        curbuild = None
        for build in app['builds']:
            if not curbuild or int(build['vercode']) > int(curbuild['vercode']):
                curbuild = build

        # Potentially incorrect UCM
        if (curbuild and curbuild['commit']
                and app['Update Check Mode'] == 'RepoManifest'
                and any(s in curbuild['commit'] for s in '.,_-/')):
            pwarn("Last used commit '%s' looks like a tag, but Update Check Mode is '%s'" % (
                curbuild['commit'], app['Update Check Mode']))

        # Dangerous auto updates
        if curbuild and app['Auto Update Mode'] != 'None':
            for flag in ['target', 'srclibs', 'scanignore']:
                if curbuild[flag]:
                    pwarn("Auto Update Mode is enabled but '%s' is manually set at '%s'" % (flag, curbuild[flag]))

        # Summary size limit
        summ_chars = len(app['Summary'])
        if summ_chars > config['char_limits']['Summary']:
            warn("Summary of length %s is over the %i char limit" % (
                summ_chars, config['char_limits']['Summary']))

        # Redundant info
        if app['Web Site'] and app['Source Code']:
            if app['Web Site'].lower() == app['Source Code'].lower():
                warn("Website '%s' is just the app's source code link" % app['Web Site'])
                app['Web Site'] = ''

        name = app['Name'] or app['Auto Name']
        if app['Summary'] and name:
            if app['Summary'].lower() == name.lower():
                warn("Summary '%s' is just the app's name" % app['Summary'])

        if app['Summary'] and app['Description'] and len(app['Description']) == 1:
            if app['Summary'].lower() == app['Description'][0].lower():
                warn("Description '%s' is just the app's summary" % app['Summary'])

        # Description size limit
        desc_chars = sum(len(l) for l in app['Description'])
        if desc_chars > config['char_limits']['Description']:
            warn("Description of length %s is over the %i char limit" % (
                desc_chars, config['char_limits']['Description']))

        # Regex checks in all kinds of fields
        for f in regex_warnings:
            for m, r in regex_warnings[f]:
                t = metadata.metafieldtype(f)
                if t == 'string':
                    if m.match(app[f]):
                        warn("%s '%s': %s" % (f, app[f], r))
                elif t == 'multiline':
                    for l in app[f]:
                        if m.match(l):
                            warn("%s at line '%s': %s" % (f, l, r))

        # Regex pedantic checks in all kinds of fields
        if options.pedantic:
            for f in regex_pedantic:
                for m, r in regex_pedantic[f]:
                    if m.match(app[f]):
                        warn("%s '%s': %s" % (f, app[f], r))

        # Build warnings
        for build in app['builds']:
            if build['disable']:
                continue
            for s in ['master', 'origin', 'HEAD', 'default', 'trunk']:
                if build['commit'] and build['commit'].startswith(s):
                    warn("Branch '%s' used as commit in build '%s'" % (
                        s, build['version']))
                for srclib in build['srclibs']:
                    ref = srclib.split('@')[1].split('/')[0]
                    if ref.startswith(s):
                        warn("Branch '%s' used as commit in srclib '%s'" % (
                            s, srclib))
            for s in ['git clone', 'git svn clone', 'svn checkout', 'svn co', 'hg clone']:
                for flag in ['init', 'prebuild', 'build']:
                    if not build[flag]:
                        continue
                    if s in build[flag]:
                        # TODO: This should not be pedantic!
                        pwarn("'%s' used in %s '%s'" % (s, flag, build[flag]))

        if not curid:
            print

    logging.info("Found a total of %i warnings in %i apps out of %i total." % (
        count['warn'], count['app'], count['app_total']))
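The regex_warnings table consulted above is defined elsewhere in the
linter. A plausible shape for it (the entries here are hypothetical, not
fdroidserver's actual rules): a metadata field maps to a list of
(compiled pattern, warning text) pairs, matched against the whole value
for string fields and per line for multiline fields.

import re

regex_warnings = {
    'Web Site': [
        (re.compile(r'.*[^sS]://'), "URL does not use HTTPS"),
    ],
    'Summary': [
        (re.compile(r'.*\s$'), "Unnecessary trailing space"),
    ],
}

for field, checks in regex_warnings.items():
    for pattern, message in checks:
        print("%s: /%s/ -> %s" % (field, pattern.pattern, message))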
Example #28
0
def main():

    global config, options

    # Parse command line...
    parser = OptionParser()
    parser.add_option("-u", "--url", default=None,
                      help="Project URL to import from.")
    parser.add_option("-s", "--subdir", default=None,
                      help="Path to main android project subdirectory, if not in root.")
    parser.add_option("-r", "--repo", default=None,
                      help="Allows a different repo to be specified for a multi-repo google code project")
    parser.add_option("--rev", default=None,
                      help="Allows a different revision (or git branch) to be specified for the initial import")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    if not options.url:
        print "Specify project url."
        sys.exit(1)
    url = options.url

    tmp_dir = 'tmp'
    if not os.path.isdir(tmp_dir):
        print "Creating temporary directory"
        os.makedirs(tmp_dir)

    # Get all apps...
    apps = metadata.read_metadata()

    # Figure out what kind of project it is...
    projecttype = None
    issuetracker = None
    license = None
    website = url  # by default; we might override it below
    if url.startswith('git://'):
        projecttype = 'git'
        repo = url
        repotype = 'git'
        sourcecode = ""
        website = ""
    elif url.startswith('https://github.com'):
        if url.endswith('/'):
            url = url[:-1]
        if url.endswith('.git'):
            print "A github URL should point to the project, not the git repo"
            sys.exit(1)
        projecttype = 'github'
        repo = url + '.git'
        repotype = 'git'
        sourcecode = url
        issuetracker = url + '/issues'
    elif url.startswith('https://gitorious.org/'):
        projecttype = 'gitorious'
        repo = 'https://git.gitorious.org/' + url[22:] + '.git'
        repotype = 'git'
        sourcecode = url
    elif url.startswith('https://bitbucket.org/'):
        if url.endswith('/'):
            url = url[:-1]
        projecttype = 'bitbucket'
        sourcecode = url + '/src'
        issuetracker = url + '/issues'
        # Figure out the repo type and address...
        repotype, repo = getrepofrompage(sourcecode)
        if not repotype:
            print "Unable to determine vcs type. " + repo
            sys.exit(1)
    elif url.startswith('http://code.google.com/p/'):
        if not url.endswith('/'):
            url += '/'
        projecttype = 'googlecode'
        sourcecode = url + 'source/checkout'
        if options.repo:
            sourcecode += "?repo=" + options.repo
        issuetracker = url + 'issues/list'

        # Figure out the repo type and address...
        repotype, repo = getrepofrompage(sourcecode)
        if not repotype:
            print "Unable to determine vcs type. " + repo
            sys.exit(1)

        # Figure out the license...
        req = urllib.urlopen(url)
        if req.getcode() != 200:
            print 'Unable to find project page at ' + url + ' - return code ' + str(req.getcode())
            sys.exit(1)
        page = req.read()
        index = page.find('Code license')
        if index == -1:
            print "Couldn't find license data"
            sys.exit(1)
        ltext = page[index:]
        lprefix = 'rel="nofollow">'
        index = ltext.find(lprefix)
        if index == -1:
            print "Couldn't find license text"
            sys.exit(1)
        ltext = ltext[index + len(lprefix):]
        index = ltext.find('<')
        if index == -1:
            print "License text not formatted as expected"
            sys.exit(1)
        ltext = ltext[:index]
        if ltext == 'GNU GPL v3':
            license = 'GPLv3'
        elif ltext == 'GNU GPL v2':
            license = 'GPLv2'
        elif ltext == 'Apache License 2.0':
            license = 'Apache2'
        elif ltext == 'MIT License':
            license = 'MIT'
        elif ltext == 'GNU Lesser GPL':
            license = 'LGPL'
        elif ltext == 'Mozilla Public License 1.1':
            license = 'MPL'
        elif ltext == 'New BSD License':
            license = 'NewBSD'
        else:
            print "License " + ltext + " is not recognised"
            sys.exit(1)

    if not projecttype:
        print "Unable to determine the project type."
        print "The URL you supplied was not in one of the supported formats. Please consult"
        print "the manual for a list of supported formats, and supply one of those."
        sys.exit(1)

    # Get a copy of the source so we can extract some info...
    print 'Getting source from ' + repotype + ' repo at ' + repo
    src_dir = os.path.join(tmp_dir, 'importer')
    if os.path.exists(src_dir):
        shutil.rmtree(src_dir)
    vcs = common.getvcs(repotype, repo, src_dir)
    vcs.gotorevision(options.rev)
    if options.subdir:
        root_dir = os.path.join(src_dir, options.subdir)
    else:
        root_dir = src_dir

    # Extract some information...
    paths = common.manifest_paths(root_dir, None)
    if paths:

        version, vercode, package = common.parse_androidmanifests(paths)
        if not package:
            print "Couldn't find package ID"
            sys.exit(1)
        if not version:
            print "WARNING: Couldn't find latest version name"
        if not vercode:
            print "WARNING: Couldn't find latest version code"
    else:
        spec = os.path.join(root_dir, 'buildozer.spec')
        if os.path.exists(spec):
            defaults = {'orientation': 'landscape', 'icon': '',
                        'permissions': '', 'android.api': "18"}
            bconfig = ConfigParser(defaults, allow_no_value=True)
            bconfig.read(spec)
            package = bconfig.get('app', 'package.domain') + '.' + bconfig.get('app', 'package.name')
            version = bconfig.get('app', 'version')
            vercode = None
        else:
            print "No android or kivy project could be found. Specify --subdir?"
            sys.exit(1)

    # Make sure it's actually new...
    for app in apps:
        if app['id'] == package:
            print "Package " + package + " already exists"
            sys.exit(1)

    # Construct the metadata...
    app = metadata.parse_metadata(None)
    app['id'] = package
    app['Web Site'] = website
    app['Source Code'] = sourcecode
    if issuetracker:
        app['Issue Tracker'] = issuetracker
    if license:
        app['License'] = license
    app['Repo Type'] = repotype
    app['Repo'] = repo
    app['Update Check Mode'] = "Tags"

    # Create a build line...
    build = {}
    build['version'] = version if version else '?'
    build['vercode'] = vercode if vercode else '?'
    build['commit'] = '?'
    build['disable'] = 'Generated by import.py - check/set version fields and commit id'
    if options.subdir:
        build['subdir'] = options.subdir
    if os.path.exists(os.path.join(root_dir, 'jni')):
        build['buildjni'] = 'yes'
    app['builds'].append(build)

    # Keep the repo directory to save bandwidth...
    if not os.path.exists('build'):
        os.mkdir('build')
    shutil.move(src_dir, os.path.join('build', package))
    with open('build/.fdroidvcs-' + package, 'w') as f:
        f.write(repotype + ' ' + repo)

    metafile = os.path.join('metadata', package + '.txt')
    metadata.write_metadata(metafile, app)
    print "Wrote " + metafile
Example #29
0
def main(config_dir, home_dir, output_dir):
    config = read_config(config_dir)
    if 'webhook_url' not in config:
        write_status(output_dir, False, 'No webhook URL given')
        return 1

    webhook = config['webhook_url']

    info = DashboardInfo(home_dir)

    failed_subsys = []
    reports = []
    failed_reports = []

    for subsys in info.all_present_subsystems():
        # ignore self
        if subsys == 'subsys_reporter':
            continue

        if not info.subsys_active(subsys):
            continue

        status = info.subsys_stage_status(subsys, 'run')
        if not status['success']:
            failed_subsys.append(failed_subsys_field(subsys, status))
            continue

        report_present = check_file_exists(info.subsys_output_dir(subsys), 'report.json')
        if not report_present:
            continue

        try:
            report = read_json(info.subsys_output_dir(subsys), 'report.json')
            reports.append(build_field(
                title=report['title'],
                value=report['value']))
        except Exception:
            failed_reports.append(subsys)

    attachments = []
    if reports:
        attachments.append(build_attachment(
            title='Reports',
            fields=reports))
    if failed_reports or failed_subsys:
        failure_text = ''
        if failed_reports:
            failure_text = 'Failed to parse reports: {}'.format(', '.join(failed_reports))
        attachments.append(build_attachment(
            title='Errors',
            text=failure_text,
            color='#fa0000',
            fields=failed_subsys))

    if not attachments:
        write_status(output_dir, True, 'Nothing to report')
        return 0

    success, msg = post_message(
        webhook,
        build_message(
            text='Subsystem Results',
            attachments=attachments))
    write_status(output_dir, success, msg)
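build_field and build_attachment are not shown in this snippet. A
plausible minimal implementation (hypothetical helpers, shaped after
Slack's legacy attachment format):

def build_field(title, value):
    # One titled entry inside an attachment.
    return {'title': title, 'value': value, 'short': False}

def build_attachment(title, fields=None, text='', color='#36a64f'):
    # One colored block in a Slack message.
    return {'title': title, 'fields': fields or [], 'text': text,
            'color': color}

print(build_attachment(title='Reports',
                       fields=[build_field('subsys', 'ok')]))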
Example #30
0
def main():

    global config, options

    # Parse command line...
    parser = ArgumentParser(usage="%(prog)s [options] [APPID [APPID ...]]")
    common.setup_global_opts(parser)
    parser.add_argument("appid", nargs='*', help="app-id to check for updates")
    parser.add_argument("--auto", action="store_true", default=False,
                        help="Process auto-updates")
    parser.add_argument("--autoonly", action="store_true", default=False,
                        help="Only process apps with auto-updates")
    parser.add_argument("--commit", action="store_true", default=False,
                        help="Commit changes")
    parser.add_argument("--gplay", action="store_true", default=False,
                        help="Only print differences with the Play Store")
    options = parser.parse_args()

    config = common.read_config(options)

    # Get all apps...
    allapps = metadata.read_metadata()

    apps = common.read_app_args(options.appid, allapps, False)

    if options.gplay:
        for app in apps:
            version, reason = check_gplay(app)
            if version is None:
                if reason == '404':
                    logging.info("{0} is not in the Play Store".format(common.getappname(app)))
                else:
                    logging.info("{0} encountered a problem: {1}".format(common.getappname(app), reason))
            if version is not None:
                stored = app.CurrentVersion
                if not stored:
                    logging.info("{0} has no Current Version but has version {1} on the Play Store"
                                 .format(common.getappname(app), version))
                elif LooseVersion(stored) < LooseVersion(version):
                    logging.info("{0} has version {1} on the Play Store, which is bigger than {2}"
                                 .format(common.getappname(app), version, stored))
                else:
                    if stored != version:
                        logging.info("{0} has version {1} on the Play Store, which differs from {2}"
                                     .format(common.getappname(app), version, stored))
                    else:
                        logging.info("{0} has the same version {1} on the Play Store"
                                     .format(common.getappname(app), version))
        return

    for appid, app in apps.iteritems():

        if options.autoonly and app.AutoUpdateMode in ('None', 'Static'):
            logging.debug("Nothing to do for {0}...".format(appid))
            continue

        logging.info("Processing " + appid + '...')

        checkupdates_app(app)

    logging.info("Finished.")
Example #31
0
def main():

    global options, config

    # Parse command line...
    parser = OptionParser(
        usage="Usage: %prog [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]")
    parser.add_option("-v",
                      "--verbose",
                      action="store_true",
                      default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-q",
                      "--quiet",
                      action="store_true",
                      default=False,
                      help="Restrict output to warnings and errors")
    parser.add_option("-a",
                      "--all",
                      action="store_true",
                      default=False,
                      help="Install all signed applications available")
    (options, args) = parser.parse_args()

    if not args and not options.all:
        raise OptionError(
            "If you really want to install all the signed apps, use --all",
            "all")

    config = common.read_config(options)

    output_dir = 'repo'
    if not os.path.isdir(output_dir):
        logging.info("No signed output directory - nothing to do")
        sys.exit(0)

    if args:

        vercodes = common.read_pkg_args(args, True)
        apks = {appid: None for appid in vercodes}

        # Get the signed apk with the highest vercode
        for apkfile in sorted(glob.glob(os.path.join(output_dir, '*.apk'))):

            try:
                appid, vercode = common.apknameinfo(apkfile)
            except FDroidException:
                continue
            if appid not in apks:
                continue
            if vercodes[appid] and vercode not in vercodes[appid]:
                continue
            apks[appid] = apkfile

        for appid, apk in apks.iteritems():
            if not apk:
                raise FDroidException("No signed apk available for %s" % appid)

    else:

        apks = {
            common.apknameinfo(apkfile)[0]: apkfile
            for apkfile in sorted(glob.glob(os.path.join(output_dir, '*.apk')))
        }

    for appid, apk in apks.iteritems():
        # Get device list each time to avoid device not found errors
        devs = devices()
        if not devs:
            raise FDroidException("No attached devices found")
        logging.info("Installing %s..." % apk)
        for dev in devs:
            logging.info("Installing %s on %s..." % (apk, dev))
            p = SdkToolsPopen(['adb', "-s", dev, "install", apk])
            fail = ""
            for line in p.output.splitlines():
                if line.startswith("Failure"):
                    fail = line[9:-1]
            if not fail:
                continue

            if fail == "INSTALL_FAILED_ALREADY_EXISTS":
                logging.warn("%s is already installed on %s." % (apk, dev))
            else:
                raise FDroidException("Failed to install %s on %s: %s" %
                                      (apk, dev, fail))

    logging.info("\nFinished")
Example #32
0
def main():

    global config, options

    # Parse command line...
    parser = OptionParser(
        usage="Usage: %prog [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]")
    parser.add_option("-v",
                      "--verbose",
                      action="store_true",
                      default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-q",
                      "--quiet",
                      action="store_true",
                      default=False,
                      help="Restrict output to warnings and errors")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    # Read all app and srclib metadata
    allapps = metadata.read_metadata()
    apps = common.read_app_args(args, allapps, True)

    problems = []

    build_dir = 'build'
    if not os.path.isdir(build_dir):
        logging.info("Creating build directory")
        os.makedirs(build_dir)
    srclib_dir = os.path.join(build_dir, 'srclib')
    extlib_dir = os.path.join(build_dir, 'extlib')

    for appid, app in apps.iteritems():

        if app['Disabled']:
            logging.info("Skipping %s: disabled" % appid)
            continue
        if not app['builds']:
            logging.info("Skipping %s: no builds specified" % appid)
            continue

        logging.info("Processing " + appid)

        try:

            build_dir = 'build/' + appid

            # Set up vcs interface and make sure we have the latest code...
            vcs = common.getvcs(app['Repo Type'], app['Repo'], build_dir)

            for thisbuild in app['builds']:

                if thisbuild['disable']:
                    logging.info(
                        "...skipping version %s - %s" %
                        (thisbuild['version'],
                         thisbuild.get('disable', thisbuild['commit'][1:])))
                else:
                    logging.info("...scanning version " + thisbuild['version'])

                    # Prepare the source code...
                    root_dir, _ = common.prepare_source(
                        vcs, app, thisbuild, build_dir, srclib_dir, extlib_dir,
                        False)

                    # Do the scan...
                    buildprobs = common.scan_source(build_dir, root_dir,
                                                    thisbuild)
                    for problem in buildprobs:
                        problems.append(problem + ' in ' + appid + ' ' +
                                        thisbuild['version'])

        except BuildException as be:
            msg = "Could not scan app %s due to BuildException: %s" % (appid,
                                                                       be)
            problems.append(msg)
        except VCSException as vcse:
            msg = "VCS error while scanning app %s: %s" % (appid, vcse)
            problems.append(msg)
        except Exception:
            msg = "Could not scan app %s due to unknown error: %s" % (
                appid, traceback.format_exc())
            problems.append(msg)

    logging.info("Finished:")
    for problem in problems:
        print problem
    print str(len(problems)) + ' problems.'
Example #33
0
def _check_writeup(writeup_file):
    writeup = writeup_file.read()

    # Check if writeup.md starts with the challenge name in H1 style.
    config = read_config('config.yml')
    config_challenge_name = config['name']

    h1_pattern = '^%s\n={1,}\n\n' % re.escape(config_challenge_name)
    try:
        writeup_challenge_name = re.search('^.*\n', writeup).group()[:-1]
        assert writeup_challenge_name == config_challenge_name
        re.search(h1_pattern, writeup).group()
    except AssertionError:
        logging.warning(
            'The challenge name in writeup.md (%s) and config.yml (%s) should be the same.'
            % (writeup_challenge_name, config_challenge_name))
    except Exception:
        logging.warning(
            'The challenge names in writeup.md should be in H1 style. For example:\n\n'
            'Challenge name\n==============\n')

    # Check if costs and H2 sections are correct in writeup.md
    cost_pattern = '\nCost: [0-9]{1,2}%\n'
    costs = [
        int(re.search(r'[0-9]{1,2}', cost).group())
        for cost in re.findall(cost_pattern, writeup)
    ]
    if sum(costs) != COST_SUM:
        counted_error(
            'The sum of costs in writeup.md should be %d%%.\n\tPlease make sure you have the following '
            'format (take care of the white spaces, starting and ending newlines):\n%s'
            % (COST_SUM, cost_pattern))

    h2_pattern = r'\n## [A-Z].{5,150}\n'
    h2 = re.findall(h2_pattern, writeup)

    if len(h2) < MIN_WRITEUP_SECTIONS:
        counted_error('There should be at least %d sections in writeup.md' %
                      MIN_WRITEUP_SECTIONS)

    h2_last = h2.pop()
    if h2_last != '\n## Complete solution\n':
        counted_error(
            'The last section should be called "Complete solution" in writeup.md'
        )

    h2costs = [
        re.search(h2_pattern, h2cost).group()
        for h2cost in re.findall(h2_pattern + cost_pattern, writeup)
    ]

    missing_costs = set(h2) - set(h2costs)
    if len(missing_costs) > 0:
        counted_error(
            'No cost is defined in writeup.md for section(s): %s\n\tPlease make sure you have the following '
            'format (take care of the starting and ending newlines):\n%s' %
            (missing_costs, cost_pattern))

    reference_style_links_pattern = r'(\[.*?\]\[.*?\])'
    if re.search(reference_style_links_pattern, writeup) is not None:
        logging.warning(
            'The writeup contains reference style links like [this one][0], which might render incorrectly.\n\t'
            'Please use inline style ones like [this](http://example.net/)')
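The H1 check above expects a Setext-style heading: the challenge name on
one line, underlined by equals signs, followed by a blank line. A
standalone demonstration of the same pattern:

import re

name = 'Challenge name'
writeup = 'Challenge name\n==============\n\nIntro...'
h1_pattern = '^%s\n={1,}\n\n' % re.escape(name)
print(re.search(h1_pattern, writeup) is not None)  # True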
Example #34
0
def main(config_dir, home_dir, output_dir):
    config = read_config(config_dir)
    if 'channel_id' not in config:
        write_status(output_dir, False, 'No channel token given')
        return 1

    success, msg, client = new_client(config)
    info = DashboardInfo(home_dir)

    if not success:
        write_status(output_dir, False, msg)
        return 1

    slack_channel = config['channel_id']
    description = ''
    if 'description' in config:
        description = config['description']

    inactive_experiments = []  # list of titles
    failed_experiments = []  # list of slack fields
    successful_experiments = []  # list of slack fields
    failed_graphs = []  # list of titles

    for exp_name in info.all_present_experiments():
        stage_statuses = info.exp_stage_statuses(exp_name)
        if not stage_statuses['precheck']['success']:
            failed_experiments.append(
                failed_experiment_field(exp_name, stage_statuses, 'precheck'))
            continue

        exp_conf = info.read_exp_config(exp_name)
        exp_status = info.exp_status_dir(exp_name)
        run_status = validate_json(exp_status,
                                   'time_delta',
                                   filename='run.json')

        exp_title = exp_name if 'title' not in exp_conf else exp_conf['title']
        notify = exp_conf['notify']
        if not exp_conf['active']:
            inactive_experiments.append(exp_title)
            continue

        failure = False
        for stage in ['setup', 'run', 'analysis', 'summary']:
            if stage not in stage_statuses:
                # setup is the only stage that's optional
                assert stage == 'setup'
                continue
            if not stage_statuses[stage]['success']:
                failed_experiments.append(
                    failed_experiment_field(
                        exp_title,
                        stage_statuses,
                        stage,
                        duration=run_status.get('time_delta'),
                        notify=notify))
                failure = True
                break

        if failure:
            continue

        # failure to visualize is not as big a deal as failing to
        # run or analyze the experiment, so we only report it but
        # don't fail to report the summary
        if not stage_statuses['visualization']['success']:
            failed_graphs.append(exp_title)

        summary = info.read_exp_summary(exp_name)
        successful_experiments.append(
            build_field(
                summary['title'],
                attach_duration(summary['value'],
                                run_status.get('time_delta'))))

    # produce messages
    attachments = []
    if successful_experiments:
        attachments.append(
            build_attachment(title='Successful benchmarks',
                             fields=successful_experiments))
    if failed_experiments:
        attachments.append(
            build_attachment(color='#fa0000',
                             title='Failed benchmarks',
                             fields=failed_experiments))
    if inactive_experiments:
        attachments.append(
            build_attachment(color='#616161',
                             title='Inactive benchmarks',
                             text=', '.join(inactive_experiments)))
    if failed_graphs:
        attachments.append(
            build_attachment(color='#fa0000',
                             title='Failed to Visualize',
                             text=', '.join(failed_graphs)))

    success, _, report = post_message(
        client, slack_channel,
        build_message(text='*Dashboard Results*{}'.format(
            '\n' + description if description != '' else ''),
                      attachments=attachments))
    if config.get('report_images', False):
        success, msg = send_graphs(config, info, client, output_dir)
        if not success:
            write_status(output_dir, False, msg)
            return 1

    write_status(output_dir, success, report)
Example #35
0
def check_config(config: dict, is_static):
    invalid_keys = set(config.keys()) - set(CONFIG_KEYS)
    if len(invalid_keys) > 0:
        counted_error('Invalid key(s) found in config.yml: %s' % invalid_keys)

    if config['version'][:1] != 'v':
        counted_error(
            'Invalid version. The version number must start with the letter v')
    elif config['version'] == 'v1':
        counted_error('This version is deprecated, please use v2.0.0')
    elif config['version'] != 'v2.0.0':
        counted_error(
            'Invalid version. The supplied config version is not supported')

    # Difficulty
    try:
        assert DIFFICULTY_RANGE['min'] <= int(
            config['difficulty']) <= DIFFICULTY_RANGE['max']
    except Exception:
        counted_error('Invalid difficulty in config.yml. '
                      'Valid values: %d - %d' %
                      (DIFFICULTY_RANGE['min'], DIFFICULTY_RANGE['max']))

    # Name
    try:
        assert NAME_RANGE['min'] <= len(config['name']) <= NAME_RANGE['max']
    except Exception:
        counted_error('Invalid challenge name in config.yml. '
                      'Name should be a string between %d - %d characters.' %
                      (NAME_RANGE['min'], NAME_RANGE['max']))

    for template_config in glob(
            os.path.join(TOOLBOX_PATH, 'templates', '*', 'config.yml')):
        if config['name'] == read_config(template_config).get('name'):
            counted_error('Please, set the challenge name in the config file.')

    # Skills
    if not isinstance(config['skills'], list):
        counted_error(
            'Invalid skills in config.yml. Skills should be placed into a list.\n'
            '\tValid skills are listed here: \n'
            '\thttps://platform.avatao.com/api-explorer/#/api/core/skills/')

    # Recommendations
    if not isinstance(config['recommendations'], dict):
        counted_error(
            'Invalid recommendations in config.yml. Recommendations should be added in the following '
            'format:\n\n'
            'recommendations:\n'
            '\twww.example.com: \'Example webpage\'\n'
            '\thttp://www.example2.com: \'Example2 webpage\'\n'
            '\thttp://example3.com: \'Example3 webpage\'')

    url_re = re.compile(
        r'(?i)\b((?:https?://|www\d{0,3}[.]|[a-z0-9.\-]+[.][a-z]{2,4}/)'
        r'(?:[^\s()<>]+|\(([^\s()<>]+|(\([^\s()<>]+\)))*\))+(?:\(([^\s()<>]'
        r'+|(\([^\s()<>]+\)))*\)|[^\s`!()\[\]{};:\'".,<>?«»“”‘’]))')

    for item in config['recommendations'].items():
        if url_re.fullmatch(item[0]) is None:
            counted_error('Invalid recommended URL (%s) found in config.yml' %
                          item[0])

        if not isinstance(item[1], str):
            counted_error(
                'The name of recommended url (%s) should be a string in config.yml'
                % item[1])

    # Owners
    if not isinstance(config['owners'], list):
        counted_error(
            'Challenge owners (%s) should be placed into a list in config.yml'
            % config['owners'])

    email_re = re.compile(
        r"(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)")
    for owner in config.get('owners', []):
        if email_re.fullmatch(owner) is None:
            counted_error(
                'Invalid owner email (%s) found in config.yml. '
                'Make sure you list the email addresses of the owners.' %
                owner)

    if not is_static:
        controller_found = False
        for item in config['crp_config'].values():

            if not isinstance(item, dict):
                counted_error('Items of crp_config must be dictionaries.')

            if 'image' in item and item['image'].find('/') < 0:
                counted_error(
                    'If the image is explicitly defined, it must be relative '
                    'to the registry - e.g. challenge:solvable.')

            if 'capabilities' in item:
                invalid_caps = set(item['capabilities']) - CAPABILITIES
                if len(invalid_caps) > 0:
                    counted_error(
                        'Forbidden capabilities: %s\n\tAllowed capabilities: %s',
                        invalid_caps, CAPABILITIES)

            if 'kernel_params' in item:
                invalid_parameters = set(
                    item['kernel_params']) - KERNEL_PARAMETERS
                if len(invalid_parameters) > 0:
                    counted_error(
                        'Forbidden kernel parameters: %s\n\tAllowed parameters: %s',
                        invalid_parameters, KERNEL_PARAMETERS)

            if 'mem_limit' in item:

                if not isinstance(item['mem_limit'], str):
                    counted_error(
                        'Invalid mem_limit value: %s. The mem_limit should be a string like: 100M'
                        % item['mem_limit'])

                elif item['mem_limit'][-1] != 'M':
                    counted_error(
                        'Invalid mem_limit value: %s. The mem_limit should be a string ending with '
                        'M (megabytes). No other unit is allowed.'
                        % item['mem_limit'])
                try:
                    mem_limit_number_part = int(item['mem_limit'][:-1])
                    if mem_limit_number_part > 999:
                        counted_error(
                            'Invalid mem_limit value: %s. The mem_limit can not be greater than 999M.'
                            % item['mem_limit'])
                except Exception:
                    counted_error(
                        'Invalid mem_limit value: %s. mem_limit must start with a number and end with '
                        'M (megabytes). No other unit is allowed.'
                        % item['mem_limit'])

            for port in item.get('ports', []):
                try:
                    port, protocol = port.split('/', 1)
                    try:
                        port = int(port)
                    except Exception:
                        counted_error(
                            'Invalid port. The port should be a number between 1 and 65535.'
                        )

                    if PORT_RANGE['min'] > port or PORT_RANGE['max'] < port:
                        counted_error(
                            'Invalid port. The port should be a number between 1 and 65535'
                        )

                    if protocol not in PROTOCOLS:
                        counted_error(
                            'Invalid protocol in config.yml (crp_config): %s. Valid values: %s',
                            protocol, PROTOCOLS)

                    elif protocol == CONTROLLER_PROTOCOL:
                        controller_found = True

                except Exception:
                    counted_error('Invalid port format. [port/protocol]')

        if not config.get('flag') and not controller_found:
            counted_error(
                'Missing controller port [5555/%s] for a dynamic challenge.' %
                CONTROLLER_PROTOCOL)

    if str(config.get('enable_flag_input')).lower() not in ('true', 'false',
                                                            '1', '0'):
        counted_error('Invalid enable_flag_input. Should be a boolean.')

    if is_static:
        try:
            assert isinstance(config['flag'], str)
        except AssertionError:
            counted_error('Invalid flag. Should be a string.')
        except KeyError:
            counted_error(
                'Missing flag. Static challenges must have a static flag set.')

    if 'flag' in config and config['flag'][0:6] == 'regex:':
        try:
            re.compile(config['flag'][6:])
        except Exception:
            counted_error('Failed to compile regex flag.')
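The "regex:" convention checked at the end: a flag may be either a plain
string or a regular expression marked by a "regex:" prefix, in which case
the remainder must compile. In isolation (the flag value is made up):

import re

flag = r'regex:CTF\{[0-9a-f]{32}\}'
if flag[0:6] == 'regex:':
    pattern = re.compile(flag[6:])
    print(bool(pattern.fullmatch('CTF{' + '0' * 32 + '}')))  # True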
Example #36
0
    argument and the script does the rest.

    Python dependencies:
        - PyYAML (http://pyyaml.org/) or simply `pip3 install PyYAML`
          (on Ubuntu you additionally need `apt-get install python3-yaml`)
    """
    init_logger()
    repo_path, repo_name = get_sys_args()

    os.chdir(repo_path)
    atexit.register(remove_containers)

    proc_list = []
    first = None

    if 'crp_config' not in read_config(repo_path):
        logging.error(
            "There is no crp_config in the config.yml,"
            " if this is a static challenge you don't need to run it.")
        sys.exit(0)

    for short_name, crp_config_item in get_crp_config(repo_path,
                                                      repo_name).items():
        proc, container = run_container(short_name,
                                        crp_config_item,
                                        share_with=first)
        proc_list.append(proc)
        if first is None:
            first = container

    logging.info(
Example #37
0
def main(config_dir, home_dir, output_dir):
    config = read_config(config_dir)
    if 'webhook_url' not in config:
        write_status(output_dir, False, 'No webhook URL given')
        return 1

    webhook = config['webhook_url']
    description = ''
    if 'description' in config:
        description = config['description']

    info = DashboardInfo(home_dir)

    inactive_experiments = []     # list of titles
    failed_experiments = []       # list of slack fields
    successful_experiments = []   # list of slack fields
    failed_graphs = []            # list of titles

    for exp_name in info.all_present_experiments():
        stage_statuses = info.exp_stage_statuses(exp_name)
        if not stage_statuses['precheck']['success']:
            failed_experiments.append(
                failed_experiment_field(exp_name, stage_statuses, 'precheck'))
            continue

        exp_conf = info.read_exp_config(exp_name)

        exp_title = exp_name if 'title' not in exp_conf else exp_conf['title']
        notify = exp_conf['notify']
        if not exp_conf['active']:
            inactive_experiments.append(exp_title)
            continue

        failure = False
        for stage in ['setup', 'run', 'analysis', 'summary']:
            if stage not in stage_statuses:
                # setup is the only stage that's optional
                assert stage == 'setup'
                continue
            if not stage_statuses[stage]['success']:
                failed_experiments.append(
                    failed_experiment_field(exp_title, stage_statuses,
                                            stage, notify))
                failure = True
                break

        if failure:
            continue

        # failure to visualize is not as big a deal as failing to
        # run or analyze the experiment, so we only report it but
        # don't fail to report the summary
        if not stage_statuses['visualization']['success']:
            failed_graphs.append(exp_title)

        summary = info.read_exp_summary(exp_name)
        successful_experiments.append(
            build_field(summary['title'], summary['value']))

    # produce messages
    attachments = []
    if successful_experiments:
        attachments.append(
            build_attachment(
                title='Successful benchmarks',
                pretext=description,
                fields=successful_experiments))
    if failed_experiments:
        attachments.append(
            build_attachment(
                color='#fa0000',
                title='Failed benchmarks',
                fields=failed_experiments))
    if inactive_experiments:
        attachments.append(
            build_attachment(
                color='#616161',
                title='Inactive benchmarks',
                text=', '.join(inactive_experiments)))
    if failed_graphs:
        attachments.append(
            build_attachment(
                color='#fa0000',
                title='Failed to Visualize',
                text=', '.join(failed_graphs)))

    success, report = post_message(
        webhook,
        build_message(
            text='Dashboard Results',
            attachments=attachments))
    write_status(output_dir, success, report)
Example #38
0
def main():
    global config, options

    # Parse command line...
    parser = OptionParser()
    parser.add_option(
        "-i",
        "--identity-file",
        default=None,
        help="Specify an identity file to provide to SSH for rsyncing")
    parser.add_option("--local-copy-dir",
                      default=None,
                      help="Specify a local folder to sync the repo to")
    parser.add_option(
        "--sync-from-local-copy-dir",
        action="store_true",
        default=False,
        help="Before uploading to servers, sync from local copy dir")
    parser.add_option("-v",
                      "--verbose",
                      action="store_true",
                      default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-q",
                      "--quiet",
                      action="store_true",
                      default=False,
                      help="Restrict output to warnings and errors")
    parser.add_option("--no-checksum",
                      action="store_true",
                      default=False,
                      help="Don't use rsync checksums")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    if len(args) != 1:
        logging.critical("Specify a single command")
        sys.exit(1)

    if args[0] != 'init' and args[0] != 'update':
        logging.critical(
            "The only commands currently supported are 'init' and 'update'")
        sys.exit(1)

    if config.get('nonstandardwebroot') is True:
        standardwebroot = False
    else:
        standardwebroot = True

    for serverwebroot in config.get('serverwebroot', []):
        # this supports both an ssh host:path and just a path
        s = serverwebroot.rstrip('/').split(':')
        if len(s) == 1:
            fdroiddir = s[0]
        elif len(s) == 2:
            host, fdroiddir = s
        else:
            logging.error('Malformed serverwebroot line: ' + serverwebroot)
            sys.exit(1)
        repobase = os.path.basename(fdroiddir)
        if standardwebroot and repobase != 'fdroid':
            logging.error('serverwebroot path does not end with "fdroid", ' +
                          'perhaps you meant one of these:\n\t' +
                          serverwebroot.rstrip('/') + '/fdroid\n\t' +
                          serverwebroot.rstrip('/').rstrip(repobase) +
                          'fdroid')
            sys.exit(1)

    if options.local_copy_dir is not None:
        local_copy_dir = options.local_copy_dir
    elif config.get('local_copy_dir'):
        local_copy_dir = config['local_copy_dir']
    else:
        local_copy_dir = None
    if local_copy_dir is not None:
        fdroiddir = local_copy_dir.rstrip('/')
        if os.path.exists(fdroiddir) and not os.path.isdir(fdroiddir):
            logging.error('local_copy_dir must be directory, not a file!')
            sys.exit(1)
        if not os.path.exists(os.path.dirname(fdroiddir)):
            logging.error('The root dir for local_copy_dir "' +
                          os.path.dirname(fdroiddir) + '" does not exist!')
            sys.exit(1)
        if not os.path.isabs(fdroiddir):
            logging.error('local_copy_dir must be an absolute path!')
            sys.exit(1)
        repobase = os.path.basename(fdroiddir)
        if standardwebroot and repobase != 'fdroid':
            logging.error('local_copy_dir does not end with "fdroid", ' +
                          'perhaps you meant: ' + fdroiddir + '/fdroid')
            sys.exit(1)
        if local_copy_dir[-1] != '/':
            local_copy_dir += '/'
        local_copy_dir = local_copy_dir.replace('//', '/')
        if not os.path.exists(fdroiddir):
            os.mkdir(fdroiddir)

    if not config.get('awsbucket') \
            and not config.get('serverwebroot') \
            and local_copy_dir is None:
        logging.warn('No serverwebroot, local_copy_dir, or awsbucket set! '
                     'Edit your config.py to set at least one.')
        sys.exit(1)

    repo_sections = ['repo']
    if config['archive_older'] != 0:
        repo_sections.append('archive')
        if not os.path.exists('archive'):
            os.mkdir('archive')
    if config['per_app_repos']:
        repo_sections += common.get_per_app_repos()

    if args[0] == 'init':
        ssh = paramiko.SSHClient()
        ssh.load_system_host_keys()
        for serverwebroot in config.get('serverwebroot', []):
            sshstr, remotepath = serverwebroot.rstrip('/').split(':')
            if sshstr.find('@') >= 0:
                username, hostname = sshstr.split('@')
            else:
                username = pwd.getpwuid(os.getuid())[0]  # get effective uid
                hostname = sshstr
            ssh.connect(hostname, username=username)
            sftp = ssh.open_sftp()
            if os.path.basename(remotepath) \
                    not in sftp.listdir(os.path.dirname(remotepath)):
                sftp.mkdir(remotepath, mode=0o755)
            for repo_section in repo_sections:
                repo_path = os.path.join(remotepath, repo_section)
                if os.path.basename(repo_path) \
                        not in sftp.listdir(remotepath):
                    sftp.mkdir(repo_path, mode=0o755)
            sftp.close()
            ssh.close()
    elif args[0] == 'update':
        for repo_section in repo_sections:
            if local_copy_dir is not None:
                if config['sync_from_local_copy_dir'] and os.path.exists(
                        repo_section):
                    sync_from_localcopy(repo_section, local_copy_dir)
                else:
                    update_localcopy(repo_section, local_copy_dir)
            for serverwebroot in config.get('serverwebroot', []):
                update_serverwebroot(serverwebroot, repo_section)
            if config.get('awsbucket'):
                update_awsbucket(repo_section)

    sys.exit(0)
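The serverwebroot parsing above accepts both a bare local path and an
ssh-style host:path pair. Extracted into a standalone helper:

def split_webroot(serverwebroot):
    # Returns (host, path); host is None for a plain local path.
    s = serverwebroot.rstrip('/').split(':')
    if len(s) == 1:
        return None, s[0]
    if len(s) == 2:
        return s[0], s[1]
    raise ValueError('Malformed serverwebroot line: ' + serverwebroot)

print(split_webroot('user@host.example:/var/www/fdroid'))
print(split_webroot('/var/www/fdroid/'))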
Example #39
0
def main():

    global options, config

    # Parse command line...
    parser = OptionParser()
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-q", "--quiet", action="store_true", default=False,
                      help="Restrict output to warnings and errors")
    parser.add_option("-d", "--distinguished-name", default=None,
                      help="X.509 'Distiguished Name' used when generating keys")
    parser.add_option("--keystore", default=None,
                      help="Path to the keystore for the repo signing key")
    parser.add_option("--repo-keyalias", default=None,
                      help="Alias of the repo signing key in the keystore")
    parser.add_option("--android-home", default=None,
                      help="Path to the Android SDK (sometimes set in ANDROID_HOME)")
    parser.add_option("--no-prompt", action="store_true", default=False,
                      help="Do not prompt for Android SDK path, just fail")
    (options, args) = parser.parse_args()

    # find root install prefix
    tmp = os.path.dirname(sys.argv[0])
    if os.path.basename(tmp) == 'bin':
        prefix = os.path.dirname(tmp)
        examplesdir = prefix + '/share/doc/fdroidserver/examples'
    else:
        # we're running straight out of the git repo
        prefix = os.path.normpath(os.path.join(os.path.dirname(__file__), '..'))
        examplesdir = prefix + '/examples'

    fdroiddir = os.getcwd()
    test_config = common.get_default_config()

    # track down where the Android SDK is, the default is to use the path set
    # in ANDROID_HOME if that exists, otherwise None
    if options.android_home is not None:
        test_config['sdk_path'] = options.android_home
    elif not common.test_sdk_exists(test_config):
        # if neither --android-home nor the default sdk_path exist, prompt the user
        default_sdk_path = '/opt/android-sdk'
        while not options.no_prompt:
            try:
                s = raw_input('Enter the path to the Android SDK ('
                              + default_sdk_path + ') here:\n> ')
            except KeyboardInterrupt:
                print('')
                sys.exit(1)
            if re.match(r'^\s*$', s) is not None:
                test_config['sdk_path'] = default_sdk_path
            else:
                test_config['sdk_path'] = s
            if common.test_sdk_exists(test_config):
                break
    if not common.test_sdk_exists(test_config):
        sys.exit(3)

    if not os.path.exists('config.py'):
        # 'metadata' and 'tmp' are created in fdroid
        if not os.path.exists('repo'):
            os.mkdir('repo')
        shutil.copy(os.path.join(examplesdir, 'fdroid-icon.png'), fdroiddir)
        shutil.copyfile(os.path.join(examplesdir, 'config.py'), 'config.py')
        os.chmod('config.py', 0o0600)
        write_to_config('sdk_path', test_config['sdk_path'])
    else:
        logging.warn('Looks like this is already an F-Droid repo, cowardly refusing to overwrite it...')
        logging.info('Try running `fdroid init` in an empty directory.')
        sys.exit()

    # try to find a working aapt, in all the recent possible paths
    build_tools = os.path.join(test_config['sdk_path'], 'build-tools')
    aaptdirs = []
    aaptdirs.append(os.path.join(build_tools, test_config['build_tools']))
    aaptdirs.append(build_tools)
    for f in os.listdir(build_tools):
        if os.path.isdir(os.path.join(build_tools, f)):
            aaptdirs.append(os.path.join(build_tools, f))
    aapt = None
    for d in sorted(aaptdirs, reverse=True):
        if os.path.isfile(os.path.join(d, 'aapt')):
            aapt = os.path.join(d, 'aapt')
            break
    if aapt is not None:
        dirname = os.path.basename(os.path.dirname(aapt))
        if dirname == 'build-tools':
            # this is the old layout, before versioned build-tools
            test_config['build_tools'] = ''
        else:
            test_config['build_tools'] = dirname
        write_to_config('build_tools', test_config['build_tools'])
    if not common.test_build_tools_exists(test_config):
        sys.exit(3)

    # now that we have a local config.py, read configuration...
    config = common.read_config(options)

    # track down where the Android NDK is
    ndk_path = '/opt/android-ndk'
    if os.path.isdir(config['ndk_path']):
        ndk_path = config['ndk_path']
    elif 'ANDROID_NDK' in os.environ.keys():
        logging.info('using ANDROID_NDK')
        ndk_path = os.environ['ANDROID_NDK']
    if os.path.isdir(ndk_path):
        write_to_config('ndk_path', ndk_path)
    # the NDK is optional so we don't prompt the user for it if it's not found

    # find or generate the keystore for the repo signing key. First try the
    # path written in the default config.py.  Then check if the user has
    # specified a path from the command line, which will trump all others.
    # Otherwise, create ~/.local/share/fdroidserver and stick it in there.  If
    # keystore is set to NONE, that means that Java will look for keys in a
    # Hardware Security Module aka Smartcard.
    keystore = config['keystore']
    if options.keystore:
        if options.keystore == 'NONE':
            keystore = options.keystore
        else:
            keystore = os.path.abspath(options.keystore)
            if not os.path.exists(keystore):
                logging.info('"' + keystore
                             + '" does not exist, creating a new keystore there.')
    write_to_config('keystore', keystore)
    repo_keyalias = None
    if options.repo_keyalias:
        repo_keyalias = options.repo_keyalias
        write_to_config('repo_keyalias', repo_keyalias)
    if options.distinguished_name:
        keydname = options.distinguished_name
        write_to_config('keydname', keydname)
    if keystore == 'NONE':  # we're using a smartcard
        write_to_config('repo_keyalias', '1')  # seems to be the default
        disable_in_config('keypass', 'never used with smartcard')
        write_to_config('smartcardoptions',
                        ('-storetype PKCS11 -providerName SunPKCS11-OpenSC '
                         + '-providerClass sun.security.pkcs11.SunPKCS11 '
                         + '-providerArg opensc-fdroid.cfg'))
        # find opensc-pkcs11.so
        if not os.path.exists('opensc-fdroid.cfg'):
            if os.path.exists('/usr/lib/opensc-pkcs11.so'):
                opensc_so = '/usr/lib/opensc-pkcs11.so'
            elif os.path.exists('/usr/lib64/opensc-pkcs11.so'):
                opensc_so = '/usr/lib64/opensc-pkcs11.so'
            else:
                files = glob.glob('/usr/lib/' + os.uname()[4] + '-*-gnu/opensc-pkcs11.so')
                if len(files) > 0:
                    opensc_so = files[0]
                else:
                    opensc_so = '/usr/lib/opensc-pkcs11.so'
                    logging.warn('No OpenSC PKCS#11 module found, ' +
                                 'install OpenSC then edit "opensc-fdroid.cfg"!')
            with open(os.path.join(examplesdir, 'opensc-fdroid.cfg'), 'r') as f:
                opensc_fdroid = f.read()
            opensc_fdroid = re.sub('^library.*', 'library = ' + opensc_so, opensc_fdroid,
                                   flags=re.MULTILINE)
            with open('opensc-fdroid.cfg', 'w') as f:
                f.write(opensc_fdroid)
    elif not os.path.exists(keystore):
        # no existing or specified keystore, generate the whole thing
        keystoredir = os.path.dirname(keystore)
        if not os.path.exists(keystoredir):
            os.makedirs(keystoredir, mode=0o700)
        password = genpassword()
        write_to_config('keystorepass', password)
        write_to_config('keypass', password)
        if options.repo_keyalias is None:
            repo_keyalias = socket.getfqdn()
            write_to_config('repo_keyalias', repo_keyalias)
        if not options.distinguished_name:
            keydname = 'CN=' + repo_keyalias + ', OU=F-Droid'
            write_to_config('keydname', keydname)
        genkey(keystore, repo_keyalias, password, keydname)

    logging.info('Built repo based in "' + fdroiddir + '"')
    logging.info('with this config:')
    logging.info('  Android SDK:\t\t\t' + config['sdk_path'])
    logging.info('  Android SDK Build Tools:\t' + os.path.dirname(aapt))
    logging.info('  Android NDK (optional):\t' + ndk_path)
    logging.info('  Keystore for signing key:\t' + keystore)
    if repo_keyalias is not None:
        logging.info('  Alias for key in store:\t' + repo_keyalias)
    logging.info('\nTo complete the setup, add your APKs to "' +
                 os.path.join(fdroiddir, 'repo') + '"' + '''
then run "fdroid update -c; fdroid update".  You might also want to edit
"config.py" to set the URL, repo name, and more.  You should also set up
a signing key (a temporary one might have been automatically generated).

For more info: https://f-droid.org/manual/fdroid.html#Simple-Binary-Repository
and https://f-droid.org/manual/fdroid.html#Signing
''')
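# Neither `write_to_config` nor `genpassword` is shown in this listing. Below
# are minimal sketches consistent with how they are called above; they are
# assumptions, not the project's actual implementations. The sketch assumes
# write_to_config rewrites (or appends) a `key = value` line in config.py, and
# that genpassword just needs a random throwaway password for the keystore.
import random
import re
import string

def write_to_config_sketch(key, value):
    with open('config.py') as f:
        text = f.read()
    line = '%s = %s' % (key, repr(value))
    pattern = r'^[#\s]*' + re.escape(key) + r'\s*=.*$'
    if re.search(pattern, text, flags=re.MULTILINE):
        # replace the existing (possibly commented-out) setting in place
        text = re.sub(pattern, line, text, count=1, flags=re.MULTILINE)
    else:
        text += '\n' + line + '\n'
    with open('config.py', 'w') as f:
        f.write(text)

def genpassword_sketch(length=32):
    # use the OS entropy source rather than the default Mersenne Twister
    rng = random.SystemRandom()
    chars = string.ascii_letters + string.digits
    return ''.join(rng.choice(chars) for _ in range(length))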
Example #40
0
def main():
    # parse options
    (options, args) = parse_options()

    if os.isatty(sys.stdin.fileno()):
        raise RuntimeError('Need configuration in stdin.')
    config = common.read_config(sys.stdin)
    conn = common.connect(config.s3)
    bucket = None

    try:
        # setup
        real_stdout = sys.stdout
        sys.stdout = sys.stderr

        # verify all required config items are present
        if 'roundtrip' not in config:
            raise RuntimeError('roundtrip section not found in config')
        for item in ['readers', 'writers', 'duration', 'files', 'bucket']:
            if item not in config.roundtrip:
                raise RuntimeError("Missing roundtrip config item: {item}".format(item=item))
        for item in ['num', 'size', 'stddev']:
            if item not in config.roundtrip.files:
                raise RuntimeError("Missing roundtrip config item: files.{item}".format(item=item))

        seeds = dict(config.roundtrip.get('random_seed', {}))
        seeds.setdefault('main', random.randrange(2**32))

        rand = random.Random(seeds['main'])

        for name in ['names', 'contents', 'writer', 'reader']:
            seeds.setdefault(name, rand.randrange(2**32))

        print 'Using random seeds: {seeds}'.format(seeds=seeds)

        # setup bucket and other objects
        bucket_name = common.choose_bucket_prefix(config.roundtrip.bucket, max_len=30)
        bucket = conn.create_bucket(bucket_name)
        print "Created bucket: {name}".format(name=bucket.name)
        objnames = realistic.names(
            mean=15,
            stddev=4,
            seed=seeds['names'],
            )
        objnames = itertools.islice(objnames, config.roundtrip.files.num)
        objnames = list(objnames)
        files = realistic.files(
            mean=1024 * config.roundtrip.files.size,
            stddev=1024 * config.roundtrip.files.stddev,
            seed=seeds['contents'],
            )
        q = gevent.queue.Queue()

        logger_g = gevent.spawn(yaml.safe_dump_all, q, stream=real_stdout)

        print "Writing {num} objects with {w} workers...".format(
            num=config.roundtrip.files.num,
            w=config.roundtrip.writers,
            )
        pool = gevent.pool.Pool(size=config.roundtrip.writers)
        start = time.time()
        for objname in objnames:
            fp = next(files)
            pool.spawn(
                writer,
                bucket=bucket,
                objname=objname,
                fp=fp,
                queue=q,
                )
        pool.join()
        stop = time.time()
        elapsed = stop - start
        q.put(dict(
                type='write_done',
                duration=int(round(elapsed * NANOSECOND)),
                ))

        print "Reading {num} objects with {w} workers...".format(
            num=config.roundtrip.files.num,
            w=config.roundtrip.readers,
            )
        # avoid accessing them in the same order as the writing
        rand.shuffle(objnames)
        pool = gevent.pool.Pool(size=config.roundtrip.readers)
        start = time.time()
        for objname in objnames:
            pool.spawn(
                reader,
                bucket=bucket,
                objname=objname,
                queue=q,
                )
        pool.join()
        stop = time.time()
        elapsed = stop - start
        q.put(dict(
                type='read_done',
                duration=int(round(elapsed * NANOSECOND)),
                ))

        q.put(StopIteration)
        logger_g.get()

    finally:
        # cleanup
        if options.cleanup:
            if bucket is not None:
                common.nuke_bucket(bucket)
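# The writer/reader greenlets above push result dicts onto `q`, and the logger
# greenlet streams them to stdout as a sequence of YAML documents via
# yaml.safe_dump_all, which accepts any iterable of documents. A minimal,
# gevent-free sketch of that output format (illustrative values):
import sys
import yaml

events = [
    dict(type='write_done', duration=123456789),
    dict(type='read_done', duration=987654321),
]
# prints each dict as its own "---"-separated YAML document
yaml.safe_dump_all(events, stream=sys.stdout)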
Example #41
0
def main():

    global options, config

    # Parse command line...
    parser = ArgumentParser()
    common.setup_global_opts(parser)
    parser.add_argument("-d", "--download", action="store_true", default=False,
                        help="Download logs we don't have")
    parser.add_argument("--recalc", action="store_true", default=False,
                        help="Recalculate aggregate stats - use when changes "
                        "have been made that would invalidate old cached data.")
    parser.add_argument("--nologs", action="store_true", default=False,
                        help="Don't do anything logs-related")
    options = parser.parse_args()

    config = common.read_config(options)

    if not config['update_stats']:
        logging.info("Stats are disabled - set \"update_stats = True\" in your config.py")
        sys.exit(1)

    # Get all metadata-defined apps...
    allmetaapps = [app for app in metadata.read_metadata().itervalues()]
    metaapps = [app for app in allmetaapps if not app.Disabled]

    statsdir = 'stats'
    logsdir = os.path.join(statsdir, 'logs')
    datadir = os.path.join(statsdir, 'data')
    if not os.path.exists(statsdir):
        os.mkdir(statsdir)
    if not os.path.exists(logsdir):
        os.mkdir(logsdir)
    if not os.path.exists(datadir):
        os.mkdir(datadir)

    if options.download:
        # Get any access logs we don't have...
        ssh = None
        ftp = None
        try:
            logging.info('Retrieving logs')
            ssh = paramiko.SSHClient()
            ssh.load_system_host_keys()
            ssh.connect(config['stats_server'], username=config['stats_user'],
                        timeout=10, key_filename=config['webserver_keyfile'])
            ftp = ssh.open_sftp()
            ftp.get_channel().settimeout(60)
            logging.info("...connected")

            ftp.chdir('logs')
            files = ftp.listdir()
            for f in files:
                if f.startswith('access-') and f.endswith('.log.gz'):

                    destpath = os.path.join(logsdir, f)
                    destsize = ftp.stat(f).st_size
                    if (not os.path.exists(destpath) or
                            os.path.getsize(destpath) != destsize):
                        logging.debug("...retrieving " + f)
                        ftp.get(f, destpath)
        except Exception:
            traceback.print_exc()
            sys.exit(1)
        finally:
            # Disconnect
            if ftp is not None:
                ftp.close()
            if ssh is not None:
                ssh.close()

    knownapks = common.KnownApks()
    unknownapks = []

    if not options.nologs:
        # Process logs
        logging.info('Processing logs...')
        appscount = Counter()
        appsvercount = Counter()
        logexpr = r'(?P<ip>[.:0-9a-fA-F]+) - - \[(?P<time>.*?)\] ' + \
            r'"GET (?P<uri>.*?) HTTP/1.\d" (?P<statuscode>\d+) ' + \
            r'\d+ "(?P<referral>.*?)" "(?P<useragent>.*?)"'
        logsearch = re.compile(logexpr).search
        for logfile in glob.glob(os.path.join(logsdir, 'access-*.log.gz')):
            logging.debug('...' + logfile)

            # Get the date for this log - e.g. 2012-02-28
            thisdate = os.path.basename(logfile)[7:-7]

            agg_path = os.path.join(datadir, thisdate + '.json')
            if not options.recalc and os.path.exists(agg_path):
                # Use previously calculated aggregate data
                with open(agg_path, 'r') as f:
                    today = json.load(f)

            else:
                # Calculate from logs...

                today = {
                    'apps': Counter(),
                    'appsver': Counter(),
                    'unknown': []
                }

                p = subprocess.Popen(["zcat", logfile], stdout=subprocess.PIPE)
                matches = (logsearch(line) for line in p.stdout)
                for match in matches:
                    if not match:
                        continue
                    if match.group('statuscode') != '200':
                        continue
                    if match.group('ip') in config['stats_ignore']:
                        continue
                    uri = match.group('uri')
                    if not uri.endswith('.apk'):
                        continue
                    _, apkname = os.path.split(uri)
                    app = knownapks.getapp(apkname)
                    if app:
                        appid, _ = app
                        today['apps'][appid] += 1
                        # Strip the '.apk' from apkname
                        appver = apkname[:-4]
                        today['appsver'][appver] += 1
                    else:
                        if apkname not in today['unknown']:
                            today['unknown'].append(apkname)

                # Save calculated aggregate data for today to cache
                with open(agg_path, 'w') as f:
                    json.dump(today, f)

            # Add today's stats (whether cached or recalculated) to the total
            for appid in today['apps']:
                appscount[appid] += today['apps'][appid]
            for appid in today['appsver']:
                appsvercount[appid] += today['appsver'][appid]
            for uk in today['unknown']:
                if uk not in unknownapks:
                    unknownapks.append(uk)

        # Calculate and write stats for total downloads...
        lst = []
        alldownloads = 0
        for appid in appscount:
            count = appscount[appid]
            lst.append(appid + " " + str(count))
            if config['stats_to_carbon']:
                carbon_send('fdroid.download.' + appid.replace('.', '_'),
                            count)
            alldownloads += count
        lst.append("ALL " + str(alldownloads))
        with open(os.path.join(statsdir, 'total_downloads_app.txt'), 'w') as f:
            f.write('# Total downloads by application, since October 2011\n')
            for line in sorted(lst):
                f.write(line + '\n')

        lst = []
        for appver in appsvercount:
            count = appsvercount[appver]
            lst.append(appver + " " + str(count))

        with open(os.path.join(statsdir, 'total_downloads_app_version.txt'), 'w') as f:
            f.write('# Total downloads by application and version, '
                    'since October 2011\n')
            for line in sorted(lst):
                f.write(line + "\n")

    # Calculate and write stats for repo types...
    logging.info("Processing repo types...")
    repotypes = Counter()
    for app in metaapps:
        rtype = app.RepoType or 'none'
        if rtype == 'srclib':
            rtype = common.getsrclibvcs(app.Repo)
        repotypes[rtype] += 1
    with open(os.path.join(statsdir, 'repotypes.txt'), 'w') as f:
        for rtype, count in most_common_stable(repotypes):
            f.write(rtype + ' ' + str(count) + '\n')

    # Calculate and write stats for update check modes...
    logging.info("Processing update check modes...")
    ucms = Counter()
    for app in metaapps:
        checkmode = app.UpdateCheckMode
        if checkmode.startswith('RepoManifest/'):
            checkmode = checkmode[:12]
        if checkmode.startswith('Tags '):
            checkmode = checkmode[:4]
        ucms[checkmode] += 1
    with open(os.path.join(statsdir, 'update_check_modes.txt'), 'w') as f:
        for checkmode, count in most_common_stable(ucms):
            f.write(checkmode + ' ' + str(count) + '\n')

    logging.info("Processing categories...")
    ctgs = Counter()
    for app in metaapps:
        for category in app.Categories:
            ctgs[category] += 1
    with open(os.path.join(statsdir, 'categories.txt'), 'w') as f:
        for category, count in most_common_stable(ctgs):
            f.write(category + ' ' + str(count) + '\n')

    logging.info("Processing antifeatures...")
    afs = Counter()
    for app in metaapps:
        if app.AntiFeatures is None:
            continue
        for antifeature in app.AntiFeatures:
            afs[antifeature] += 1
    with open(os.path.join(statsdir, 'antifeatures.txt'), 'w') as f:
        for antifeature, count in most_common_stable(afs):
            f.write(antifeature + ' ' + str(count) + '\n')

    # Calculate and write stats for licenses...
    logging.info("Processing licenses...")
    licenses = Counter()
    for app in metaapps:
        license = app.License
        licenses[license] += 1
    with open(os.path.join(statsdir, 'licenses.txt'), 'w') as f:
        for license, count in most_common_stable(licenses):
            f.write(license + ' ' + str(count) + '\n')

    # Write list of disabled apps...
    logging.info("Processing disabled apps...")
    disabled = [app.id for app in allmetaapps if app.Disabled]
    with open(os.path.join(statsdir, 'disabled_apps.txt'), 'w') as f:
        for appid in sorted(disabled):
            f.write(appid + '\n')

    # Write list of latest apps added to the repo...
    logging.info("Processing latest apps...")
    latest = knownapks.getlatest(10)
    with open(os.path.join(statsdir, 'latestapps.txt'), 'w') as f:
        for appid in latest:
            f.write(appid + '\n')

    if unknownapks:
        logging.info('\nUnknown apks:')
        for apk in unknownapks:
            logging.info(apk)

    logging.info("Finished.")
Example #42
0
def main():

    global options, config

    # Parse command line...
    parser = OptionParser()
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-p", "--package", default=None,
                      help="Verify only the specified package")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    tmp_dir = 'tmp'
    if not os.path.isdir(tmp_dir):
        print "Creating temporary directory"
        os.makedirs(tmp_dir)

    unsigned_dir = 'unsigned'
    if not os.path.isdir(unsigned_dir):
        print "No unsigned directory - nothing to do"
        sys.exit(0)

    verified = 0
    notverified = 0

    for apkfile in sorted(glob.glob(os.path.join(unsigned_dir, '*.apk'))):

        apkfilename = os.path.basename(apkfile)
        i = apkfilename.rfind('_')
        if i == -1:
            raise BuildException("Invalid apk name")
        appid = apkfilename[:i]

        if not options.package or options.package == appid:

            try:

                print "Processing " + apkfilename

                remoteapk = os.path.join(tmp_dir, apkfilename)
                if os.path.exists(remoteapk):
                    os.remove(remoteapk)
                url = 'https://f-droid.org/repo/' + apkfilename
                print "...retrieving " + url
                p = subprocess.Popen(['wget', url],
                                     cwd=tmp_dir,
                                     stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
                out = p.communicate()[0]
                if p.returncode != 0:
                    raise Exception("Failed to get " + apkfilename)

                thisdir = os.path.join(tmp_dir, 'this_apk')
                thatdir = os.path.join(tmp_dir, 'that_apk')
                for d in [thisdir, thatdir]:
                    if os.path.exists(d):
                        shutil.rmtree(d)
                    os.mkdir(d)

                if subprocess.call(['jar', 'xf',
                                    os.path.join("..", "..", unsigned_dir, apkfilename)],
                                   cwd=thisdir) != 0:
                    raise Exception("Failed to unpack local build of " + apkfilename)
                if subprocess.call(['jar', 'xf', os.path.join("..", "..", remoteapk)],
                                   cwd=thatdir) != 0:
                    raise Exception("Failed to unpack remote build of " + apkfilename)

                p = subprocess.Popen(['diff', '-r', 'this_apk', 'that_apk'],
                                     cwd=tmp_dir, stdout=subprocess.PIPE)
                out = p.communicate()[0]
                lines = out.splitlines()
                if len(lines) != 1 or lines[0].find('META-INF') == -1:
                    raise Exception("Unexpected diff output - " + out)

                print "...successfully verified"
                verified += 1

            except Exception, e:
                print "...NOT verified - {0}".format(e)
                notverified += 1
Example #43
0
def main():

    global options, config

    # Parse command line...
    parser = ArgumentParser(usage="%(prog)s [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]")
    common.setup_global_opts(parser)
    parser.add_argument("appid", nargs='*', help="app-id with optional versioncode in the form APPID[:VERCODE]")
    parser.add_argument("-a", "--all", action="store_true", default=False,
                        help="Install all signed applications available")
    options = parser.parse_args()

    if not options.appid and not options.all:
        parser.error("option %s: If you really want to install all the signed apps, use --all" % "all")

    config = common.read_config(options)

    output_dir = 'repo'
    if not os.path.isdir(output_dir):
        logging.info("No signed output directory - nothing to do")
        sys.exit(0)

    if options.appid:

        vercodes = common.read_pkg_args(options.appid, True)
        apks = {appid: None for appid in vercodes}

        # Get the signed apk with the highest vercode
        for apkfile in sorted(glob.glob(os.path.join(output_dir, '*.apk'))):

            try:
                appid, vercode = common.apknameinfo(apkfile)
            except FDroidException:
                continue
            if appid not in apks:
                continue
            if vercodes[appid] and vercode not in vercodes[appid]:
                continue
            apks[appid] = apkfile

        for appid, apk in apks.iteritems():
            if not apk:
                raise FDroidException("No signed apk available for %s" % appid)

    else:

        apks = {common.apknameinfo(apkfile)[0]: apkfile for apkfile in
                sorted(glob.glob(os.path.join(output_dir, '*.apk')))}

    for appid, apk in apks.iteritems():
        # Get device list each time to avoid device not found errors
        devs = devices()
        if not devs:
            raise FDroidException("No attached devices found")
        logging.info("Installing %s..." % apk)
        for dev in devs:
            logging.info("Installing %s on %s..." % (apk, dev))
            p = SdkToolsPopen(['adb', "-s", dev, "install", apk])
            fail = ""
            for line in p.output.splitlines():
                if line.startswith("Failure"):
                    fail = line[9:-1]
            if not fail:
                continue

            if fail == "INSTALL_FAILED_ALREADY_EXISTS":
                logging.warn("%s is already installed on %s." % (apk, dev))
            else:
                raise FDroidException("Failed to install %s on %s: %s" % (
                    apk, dev, fail))

    logging.info("\nFinished")
Example #44
0

if __name__ == "__main__":

    print("\nStart train XGB model\n")
    parser = argparse.ArgumentParser(description="Train XGB model")
    parser.add_argument("config_filepath",
                        metavar="<config_filepath>",
                        help="Config file")

    args = parser.parse_args()
    config_filepath = args.config_filepath
    assert os.path.exists(
        config_filepath), "Config file is not found at %s" % config_filepath

    config = read_config(config_filepath)

    now = datetime.now()
    config_filename = os.path.basename(config_filepath)
    config_filename = config_filename.replace(".json", "")
    output_path = os.path.abspath(
        os.path.join(
            "..", "output",
            "%s_training_%s" % (now.strftime("%Y%m%d_%H%M"), config_filename)))
    logs_path = output_path
    if not os.path.exists(logs_path):
        os.makedirs(logs_path)

    assert "dataset" in config
    train_df = get_train_df(config["dataset"])
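# In this example `read_config` evidently loads a JSON file (the code above
# strips a ".json" suffix from the config filename). A minimal sketch under
# that assumption:
import json

def read_config_sketch(config_filepath):
    with open(config_filepath) as f:
        return json.load(f)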
Example #45
0
 def read_exp_config(self, exp_name):
     return read_config(self.exp_config_dir(exp_name))
Example #46
0
def main():

    global config, options

    # Parse command line...
    parser = OptionParser()
    parser.add_option("-v",
                      "--verbose",
                      action="store_true",
                      default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-q",
                      "--quiet",
                      action="store_true",
                      default=False,
                      help="Restrict output to warnings and errors")
    parser.add_option("-u",
                      "--url",
                      default=None,
                      help="Project URL to import from.")
    parser.add_option(
        "-s",
        "--subdir",
        default=None,
        help="Path to main android project subdirectory, if not in root.")
    parser.add_option(
        "--rev",
        default=None,
        help="Allows a different revision (or git branch) to be specified "
             "for the initial import")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    if not options.url:
        logging.error("Specify project url.")
        sys.exit(1)
    url = options.url

    tmp_dir = 'tmp'
    if not os.path.isdir(tmp_dir):
        logging.info("Creating temporary directory")
        os.makedirs(tmp_dir)

    # Get all apps...
    apps = metadata.read_metadata()

    # Figure out what kind of project it is...
    projecttype = None
    issuetracker = None
    license = None
    website = url  # by default, we might override it
    if url.startswith('git://'):
        projecttype = 'git'
        repo = url
        repotype = 'git'
        sourcecode = ""
        website = ""
    elif url.startswith('https://github.com'):
        projecttype = 'github'
        repo = url
        repotype = 'git'
        sourcecode = url
        issuetracker = url + '/issues'
        website = ""
    elif url.startswith('https://gitlab.com/'):
        projecttype = 'gitlab'
        repo = url
        repotype = 'git'
        sourcecode = url + '/tree/HEAD'
        issuetracker = url + '/issues'
    elif url.startswith('https://bitbucket.org/'):
        if url.endswith('/'):
            url = url[:-1]
        projecttype = 'bitbucket'
        sourcecode = url + '/src'
        issuetracker = url + '/issues'
        # Figure out the repo type and address...
        repotype, repo = getrepofrompage(sourcecode)
        if not repotype:
            logging.error("Unable to determine vcs type. " + repo)
            sys.exit(1)
    if not projecttype:
        logging.error("Unable to determine the project type.")
        logging.error("The URL you supplied was not in one of the supported formats. Please consult")
        logging.error("the manual for a list of supported formats, and supply one of those.")
        sys.exit(1)

    # Ensure we have a sensible-looking repo address at this point. If not, we
    # might have got a page format we weren't expecting. (Note that we
    # specifically don't want git@...)
    if ((repotype != 'bzr' and
         (not repo.startswith('http://') and not repo.startswith('https://')
          and not repo.startswith('git://'))) or ' ' in repo):
        logging.error(
            "Repo address '{0}' does not seem to be valid".format(repo))
        sys.exit(1)

    # Get a copy of the source so we can extract some info...
    logging.info('Getting source from ' + repotype + ' repo at ' + repo)
    src_dir = os.path.join(tmp_dir, 'importer')
    if os.path.exists(src_dir):
        shutil.rmtree(src_dir)
    vcs = common.getvcs(repotype, repo, src_dir)
    vcs.gotorevision(options.rev)
    if options.subdir:
        root_dir = os.path.join(src_dir, options.subdir)
    else:
        root_dir = src_dir

    # Extract some information...
    paths = common.manifest_paths(root_dir, [])
    if paths:

        version, vercode, package = common.parse_androidmanifests(paths)
        if not package:
            logging.error("Couldn't find package ID")
            sys.exit(1)
        if not version:
            logging.warn("Couldn't find latest version name")
        if not vercode:
            logging.warn("Couldn't find latest version code")
    else:
        spec = os.path.join(root_dir, 'buildozer.spec')
        if os.path.exists(spec):
            defaults = {
                'orientation': 'landscape',
                'icon': '',
                'permissions': '',
                'android.api': "18"
            }
            bconfig = ConfigParser(defaults, allow_no_value=True)
            bconfig.read(spec)
            package = bconfig.get('app', 'package.domain') + '.' + bconfig.get(
                'app', 'package.name')
            version = bconfig.get('app', 'version')
            vercode = None
        else:
            logging.error(
                "No android or kivy project could be found. Specify --subdir?")
            sys.exit(1)

    # Make sure it's actually new...
    if package in apps:
        logging.error("Package " + package + " already exists")
        sys.exit(1)

    # Construct the metadata...
    app = metadata.parse_metadata(None)[1]
    app['Web Site'] = website
    app['Source Code'] = sourcecode
    if issuetracker:
        app['Issue Tracker'] = issuetracker
    if license:
        app['License'] = license
    app['Repo Type'] = repotype
    app['Repo'] = repo
    app['Update Check Mode'] = "Tags"

    # Create a build line...
    build = {}
    build['version'] = version or '?'
    build['vercode'] = vercode or '?'
    build['commit'] = '?'
    build['disable'] = 'Generated by import.py - check/set version fields and commit id'
    if options.subdir:
        build['subdir'] = options.subdir
    if os.path.exists(os.path.join(root_dir, 'jni')):
        build['buildjni'] = ['yes']

    for flag, value in metadata.flag_defaults.iteritems():
        if flag in build:
            continue
        build[flag] = value

    app['builds'].append(build)

    # Keep the repo directory to save bandwidth...
    if not os.path.exists('build'):
        os.mkdir('build')
    shutil.move(src_dir, os.path.join('build', package))
    with open('build/.fdroidvcs-' + package, 'w') as f:
        f.write(repotype + ' ' + repo)

    metafile = os.path.join('metadata', package + '.txt')
    metadata.write_metadata(metafile, app)
    logging.info("Wrote " + metafile)
Example #47
0
 def read_subsys_config(self, subsys_name):
     return read_config(self.subsys_config_dir(subsys_name))
Example #48
0
def main():

    global config, options

    # Parse command line...
    parser = OptionParser(usage="Usage: %prog [options] [APPID [APPID ...]]")
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-q", "--quiet", action="store_true", default=False,
                      help="Restrict output to warnings and errors")
    parser.add_option("--auto", action="store_true", default=False,
                      help="Process auto-updates")
    parser.add_option("--autoonly", action="store_true", default=False,
                      help="Only process apps with auto-updates")
    parser.add_option("--commit", action="store_true", default=False,
                      help="Commit changes")
    parser.add_option("--gplay", action="store_true", default=False,
                      help="Only print differences with the Play Store")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    # Get all apps...
    allapps = metadata.read_metadata()

    apps = common.read_app_args(args, allapps, False)

    if options.gplay:
        for app in apps:
            version, reason = check_gplay(app)
            if version is None:
                if reason == '404':
                    logging.info("{0} is not in the Play Store".format(common.getappname(app)))
                else:
                    logging.info("{0} encountered a problem: {1}".format(common.getappname(app), reason))
            if version is not None:
                stored = app['Current Version']
                if not stored:
                    logging.info("{0} has no Current Version but has version {1} on the Play Store"
                                 .format(common.getappname(app), version))
                elif LooseVersion(stored) < LooseVersion(version):
                    logging.info("{0} has version {1} on the Play Store, which is bigger than {2}"
                                 .format(common.getappname(app), version, stored))
                else:
                    if stored != version:
                        logging.info("{0} has version {1} on the Play Store, which differs from {2}"
                                     .format(common.getappname(app), version, stored))
                    else:
                        logging.info("{0} has the same version {1} on the Play Store"
                                     .format(common.getappname(app), version))
        return

    for appid, app in apps.iteritems():

        if options.autoonly and app['Auto Update Mode'] in ('None', 'Static'):
            logging.debug("Nothing to do for {0}...".format(appid))
            continue

        logging.info("Processing " + appid + '...')

        checkupdates_app(app)

    logging.info("Finished.")
Example #49
0
def main():

    global options, config

    # Parse command line...
    parser = OptionParser(usage="Usage: %prog [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]")
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-q", "--quiet", action="store_true", default=False,
                      help="Restrict output to warnings and errors")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    tmp_dir = 'tmp'
    if not os.path.isdir(tmp_dir):
        logging.info("Creating temporary directory")
        os.makedirs(tmp_dir)

    unsigned_dir = 'unsigned'
    if not os.path.isdir(unsigned_dir):
        logging.error("No unsigned directory - nothing to do")
        sys.exit(0)

    verified = 0
    notverified = 0

    vercodes = common.read_pkg_args(args, True)

    for apkfile in sorted(glob.glob(os.path.join(unsigned_dir, '*.apk'))):

        apkfilename = os.path.basename(apkfile)
        appid, vercode = common.apknameinfo(apkfile)

        if vercodes and appid not in vercodes:
            continue
        if vercodes.get(appid) and vercode not in vercodes[appid]:
            continue

        try:

            logging.info("Processing " + apkfilename)

            remoteapk = os.path.join(tmp_dir, apkfilename)
            if os.path.exists(remoteapk):
                os.remove(remoteapk)
            url = 'https://f-droid.org/repo/' + apkfilename
            logging.info("...retrieving " + url)
            p = FDroidPopen(['wget', url], cwd=tmp_dir)
            if p.returncode != 0:
                raise FDroidException("Failed to get " + apkfilename)

            thisdir = os.path.join(tmp_dir, 'this_apk')
            thatdir = os.path.join(tmp_dir, 'that_apk')
            for d in [thisdir, thatdir]:
                if os.path.exists(d):
                    shutil.rmtree(d)
                os.mkdir(d)

            if subprocess.call(['jar', 'xf',
                                os.path.join("..", "..", unsigned_dir, apkfilename)],
                               cwd=thisdir) != 0:
                raise FDroidException("Failed to unpack local build of " + apkfilename)
            if subprocess.call(['jar', 'xf',
                                os.path.join("..", "..", remoteapk)],
                               cwd=thatdir) != 0:
                raise FDroidException("Failed to unpack remote build of " + apkfilename)

            p = FDroidPopen(['diff', '-r', 'this_apk', 'that_apk'], cwd=tmp_dir)
            lines = p.output.splitlines()
            if len(lines) != 1 or 'META-INF' not in lines[0]:
                raise FDroidException("Unexpected diff output - " + p.output)

            logging.info("...successfully verified")
            verified += 1

        except FDroidException, e:
            logging.info("...NOT verified - {0}".format(e))
            notverified += 1
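# `common.apknameinfo` (used above) is not part of this listing. A sketch of
# the filename convention it appears to implement -- "appid_vercode.apk" --
# raising ValueError here where the real code raises FDroidException:
import os
import re

def apknameinfo_sketch(apkfile):
    basename = os.path.basename(apkfile)
    m = re.match(r'(.+)_(\d+)\.apk$', basename)
    if m is None:
        raise ValueError("Invalid apk name: " + basename)
    return m.group(1), int(m.group(2))

assert apknameinfo_sketch('unsigned/org.example.app_42.apk') == ('org.example.app', 42)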
Example #50
0
 def test_read_config(self):
     # Try to read a non-existent config. Config is vital
     # so this should blow up.
     with self.assertRaises(Exception):
         common.read_config('')
Example #51
0
def main(home_dir):
    global_conf = read_config(sanitize_path(home_dir))
    print(sanitize_path(global_conf['setup_dir']))
Example #52
0
def find_and_load_config(fn):
    fn = conditionally_prepend_section_path(fn)
    config = read_config(fn)
    return fn, config
Example #53
0
def main():

    global options, config

    # Parse command line...
    parser = OptionParser()
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-q", "--quiet", action="store_true", default=False,
                      help="Restrict output to warnings and errors")
    parser.add_option("-d", "--download", action="store_true", default=False,
                      help="Download logs we don't have")
    parser.add_option("--recalc", action="store_true", default=False,
                      help="Recalculate aggregate stats - use when changes "
                      "have been made that would invalidate old cached data.")
    parser.add_option("--nologs", action="store_true", default=False,
                      help="Don't do anything logs-related")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    if not config['update_stats']:
        logging.info("Stats are disabled - check your configuration")
        sys.exit(1)

    # Get all metadata-defined apps...
    metaapps = [a for a in metadata.read_metadata().itervalues() if not a['Disabled']]

    statsdir = 'stats'
    logsdir = os.path.join(statsdir, 'logs')
    datadir = os.path.join(statsdir, 'data')
    if not os.path.exists(statsdir):
        os.mkdir(statsdir)
    if not os.path.exists(logsdir):
        os.mkdir(logsdir)
    if not os.path.exists(datadir):
        os.mkdir(datadir)

    if options.download:
        # Get any access logs we don't have...
        ssh = None
        ftp = None
        try:
            logging.info('Retrieving logs')
            ssh = paramiko.SSHClient()
            ssh.load_system_host_keys()
            ssh.connect('f-droid.org', username='******', timeout=10,
                        key_filename=config['webserver_keyfile'])
            ftp = ssh.open_sftp()
            ftp.get_channel().settimeout(60)
            logging.info("...connected")

            ftp.chdir('logs')
            files = ftp.listdir()
            for f in files:
                if f.startswith('access-') and f.endswith('.log.gz'):

                    destpath = os.path.join(logsdir, f)
                    destsize = ftp.stat(f).st_size
                    if (not os.path.exists(destpath) or
                            os.path.getsize(destpath) != destsize):
                        logging.debug("...retrieving " + f)
                        ftp.get(f, destpath)
        except Exception:
            traceback.print_exc()
            sys.exit(1)
        finally:
            # Disconnect
            if ftp is not None:
                ftp.close()
            if ssh is not None:
                ssh.close()

    knownapks = common.KnownApks()
    unknownapks = []

    if not options.nologs:
        # Process logs
        logging.info('Processing logs...')
        appscount = Counter()
        appsvercount = Counter()
        logexpr = r'(?P<ip>[.:0-9a-fA-F]+) - - \[(?P<time>.*?)\] ' + \
            r'"GET (?P<uri>.*?) HTTP/1.\d" (?P<statuscode>\d+) ' + \
            r'\d+ "(?P<referral>.*?)" "(?P<useragent>.*?)"'
        logsearch = re.compile(logexpr).search
        for logfile in glob.glob(os.path.join(logsdir, 'access-*.log.gz')):
            logging.debug('...' + logfile)

            # Get the date for this log - e.g. 2012-02-28
            thisdate = os.path.basename(logfile)[7:-7]

            agg_path = os.path.join(datadir, thisdate + '.json')
            if not options.recalc and os.path.exists(agg_path):
                # Use previously calculated aggregate data
                with open(agg_path, 'r') as f:
                    today = json.load(f)

            else:
                # Calculate from logs...

                today = {
                    'apps': Counter(),
                    'appsver': Counter(),
                    'unknown': []
                    }

                p = subprocess.Popen(["zcat", logfile], stdout=subprocess.PIPE)
                matches = (logsearch(line) for line in p.stdout)
                for match in matches:
                    if not match:
                        continue
                    if match.group('statuscode') != '200':
                        continue
                    if match.group('ip') in config['stats_ignore']:
                        continue
                    uri = match.group('uri')
                    if not uri.endswith('.apk'):
                        continue
                    _, apkname = os.path.split(uri)
                    app = knownapks.getapp(apkname)
                    if app:
                        appid, _ = app
                        today['apps'][appid] += 1
                        # Strip the '.apk' from apkname
                        appver = apkname[:-4]
                        today['appsver'][appver] += 1
                    else:
                        if apkname not in today['unknown']:
                            today['unknown'].append(apkname)

                # Save calculated aggregate data for today to cache
                with open(agg_path, 'w') as f:
                    json.dump(today, f)

            # Add today's stats (whether cached or recalculated) to the total
            for appid in today['apps']:
                appscount[appid] += today['apps'][appid]
            for appid in today['appsver']:
                appsvercount[appid] += today['appsver'][appid]
            for uk in today['unknown']:
                if uk not in unknownapks:
                    unknownapks.append(uk)

        # Calculate and write stats for total downloads...
        lst = []
        alldownloads = 0
        for appid in appscount:
            count = appscount[appid]
            lst.append(appid + " " + str(count))
            if config['stats_to_carbon']:
                carbon_send('fdroid.download.' + appid.replace('.', '_'),
                            count)
            alldownloads += count
        lst.append("ALL " + str(alldownloads))
        f = open('stats/total_downloads_app.txt', 'w')
        f.write('# Total downloads by application, since October 2011\n')
        for line in sorted(lst):
            f.write(line + '\n')
        f.close()

        f = open('stats/total_downloads_app_version.txt', 'w')
        f.write('# Total downloads by application and version, '
                'since October 2011\n')
        lst = []
        for appver in appsvercount:
            count = appsvercount[appver]
            lst.append(appver + " " + str(count))
        for line in sorted(lst):
            f.write(line + "\n")
        f.close()

    # Calculate and write stats for repo types...
    logging.info("Processing repo types...")
    repotypes = Counter()
    for app in metaapps:
        rtype = app['Repo Type'] or 'none'
        if rtype == 'srclib':
            rtype = common.getsrclibvcs(app['Repo'])
        repotypes[rtype] += 1
    f = open('stats/repotypes.txt', 'w')
    for rtype in repotypes:
        count = repotypes[rtype]
        f.write(rtype + ' ' + str(count) + '\n')
    f.close()

    # Calculate and write stats for update check modes...
    logging.info("Processing update check modes...")
    ucms = Counter()
    for app in metaapps:
        checkmode = app['Update Check Mode']
        if checkmode.startswith('RepoManifest/'):
            checkmode = checkmode[:12]
        if checkmode.startswith('Tags '):
            checkmode = checkmode[:4]
        ucms[checkmode] += 1
    f = open('stats/update_check_modes.txt', 'w')
    for checkmode in ucms:
        count = ucms[checkmode]
        f.write(checkmode + ' ' + str(count) + '\n')
    f.close()

    logging.info("Processing categories...")
    ctgs = Counter()
    for app in metaapps:
        for category in app['Categories']:
            ctgs[category] += 1
    f = open('stats/categories.txt', 'w')
    for category in ctgs:
        count = ctgs[category]
        f.write(category + ' ' + str(count) + '\n')
    f.close()

    logging.info("Processing antifeatures...")
    afs = Counter()
    for app in metaapps:
        if app['AntiFeatures'] is None:
            continue
        antifeatures = [a.strip() for a in app['AntiFeatures'].split(',')]
        for antifeature in antifeatures:
            afs[antifeature] += 1
    f = open('stats/antifeatures.txt', 'w')
    for antifeature in afs:
        count = afs[antifeature]
        f.write(antifeature + ' ' + str(count) + '\n')
    f.close()

    # Calculate and write stats for licenses...
    logging.info("Processing licenses...")
    licenses = Counter()
    for app in metaapps:
        license = app['License']
        licenses[license] += 1
    f = open('stats/licenses.txt', 'w')
    for license in licenses:
        count = licenses[license]
        f.write(license + ' ' + str(count) + '\n')
    f.close()

    # Write list of latest apps added to the repo...
    logging.info("Processing latest apps...")
    latest = knownapks.getlatest(10)
    f = open('stats/latestapps.txt', 'w')
    for app in latest:
        f.write(app + '\n')
    f.close()

    if unknownapks:
        logging.info('\nUnknown apks:')
        for apk in unknownapks:
            logging.info(apk)

    logging.info("Finished.")
Example #54
0
def main():

    global config, options

    # Parse command line...
    parser = OptionParser()
    parser.add_option("-c", "--createmeta", action="store_true", default=False,
                      help="Create skeleton metadata files that are missing")
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-q", "--quiet", action="store_true", default=False,
                      help="No output, except for warnings and errors")
    parser.add_option("-b", "--buildreport", action="store_true", default=False,
                      help="Report on build data status")
    parser.add_option("-i", "--interactive", default=False, action="store_true",
                      help="Interactively ask about things that need updating.")
    parser.add_option("-I", "--icons", action="store_true", default=False,
                      help="Resize all the icons exceeding the max pixel size and exit")
    parser.add_option("-e", "--editor", default="/etc/alternatives/editor",
                      help="Specify editor to use in interactive mode. Default "+
                          "is /etc/alternatives/editor")
    parser.add_option("-w", "--wiki", default=False, action="store_true",
                      help="Update the wiki")
    parser.add_option("", "--pretty", action="store_true", default=False,
                      help="Produce human-readable index.xml")
    parser.add_option("--clean", action="store_true", default=False,
                      help="Clean update - don't uses caches, reprocess all apks")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    repodirs = ['repo']
    if config['archive_older'] != 0:
        repodirs.append('archive')
        if not os.path.exists('archive'):
            os.mkdir('archive')

    if options.icons:
        resize_all_icons(repodirs)
        sys.exit(0)

    # Get all apps...
    apps = metadata.read_metadata()

    # Generate a list of categories...
    categories = []
    for app in apps:
        cats = app['Categories'].split(',')
        for cat in cats:
            if cat not in categories:
                categories.append(cat)

    # Read known apks data (will be updated and written back when we've finished)
    knownapks = common.KnownApks()

    # Gather information about all the apk files in the repo directory, using
    # cached data if possible.
    apkcachefile = os.path.join('tmp', 'apkcache')
    if not options.clean and os.path.exists(apkcachefile):
        with open(apkcachefile, 'rb') as cf:
            apkcache = pickle.load(cf)
    else:
        apkcache = {}
    cachechanged = False

    delete_disabled_builds(apps, apkcache, repodirs)

    # Scan all apks in the main repo
    apks, cc = scan_apks(apps, apkcache, repodirs[0], knownapks)
    if cc:
        cachechanged = True

    # Scan the archive repo for apks as well
    if len(repodirs) > 1:
        archapks, cc = scan_apks(apps, apkcache, repodirs[1], knownapks)
        if cc:
            cachechanged = True
    else:
        archapks = []

    # Some information from the apks needs to be applied up to the application
    # level. When doing this, we use the info from the most recent version's apk.
    # We deal with figuring out when the app was added and last updated at the
    # same time.
    for app in apps:
        bestver = 0
        added = None
        lastupdated = None
        for apk in apks + archapks:
            if apk['id'] == app['id']:
                if apk['versioncode'] > bestver:
                    bestver = apk['versioncode']
                    bestapk = apk

                if 'added' in apk:
                    if not added or apk['added'] < added:
                        added = apk['added']
                    if not lastupdated or apk['added'] > lastupdated:
                        lastupdated = apk['added']

        if added:
            app['added'] = added
        elif options.verbose:
            print "WARNING: Don't know when " + app['id'] + " was added"
        if lastupdated:
            app['lastupdated'] = lastupdated
        elif options.verbose:
            print "WARNING: Don't know when " + app['id'] + " was last updated"

        if bestver == 0:
            if app['Name'] is None:
                app['Name'] = app['id']
            app['icon'] = None
            if options.verbose and app['Disabled'] is None:
                print "WARNING: Application " + app['id'] + " has no packages"
        else:
            if app['Name'] is None:
                app['Name'] = bestapk['name']
            app['icon'] = bestapk['icon'] if 'icon' in bestapk else None

    # Sort the app list by name, then the web site doesn't have to by default.
    # (we had to wait until we'd scanned the apks to do this, because mostly the
    # name comes from there!)
    apps = sorted(apps, key=lambda app: app['Name'].upper())

    # Generate warnings for apk's with no metadata (or create skeleton
    # metadata files, if requested on the command line)
    for apk in apks:
        found = False
        for app in apps:
            if app['id'] == apk['id']:
                found = True
                break
        if not found:
            if options.createmeta:
                f = open(os.path.join('metadata', apk['id'] + '.txt'), 'w')
                f.write("License:Unknown\n")
                f.write("Web Site:\n")
                f.write("Source Code:\n")
                f.write("Issue Tracker:\n")
                f.write("Summary:" + apk['name'] + "\n")
                f.write("Description:\n")
                f.write(apk['name'] + "\n")
                f.write(".\n")
                f.close()
                print "Generated skeleton metadata for " + apk['id']
            else:
                print "WARNING: " + apk['apkname'] + " (" + apk['id'] + ") has no metadata"
                print "       " + apk['name'] + " - " + apk['version']  

    if len(repodirs) > 1:
        archive_old_apks(apps, apks, repodirs[0], repodirs[1], config['archive_older'])

    # Make the index for the main repo...
    make_index(apps, apks, repodirs[0], False, categories)

    # If there's an archive repo, make the index for it. We already scanned it
    # earlier on.
    if len(repodirs) > 1:
        make_index(apps, archapks, repodirs[1], True, categories)

    if config['update_stats']:

        # Update known apks info...
        knownapks.writeifchanged()

        # Generate latest apps data for widget
        if os.path.exists(os.path.join('stats', 'latestapps.txt')):
            data = ''
            for line in open(os.path.join('stats', 'latestapps.txt')):
                appid = line.rstrip()
                data += appid + "\t"
                for app in apps:
                    if app['id'] == appid:
                        data += app['Name'] + "\t"
                        if app['icon'] is not None:
                            data += app['icon'] + "\t"
                        data += app['License'] + "\n"
                        break
            f = open(os.path.join(repodirs[0], 'latestapps.dat'), 'w')
            f.write(data)
            f.close()

    if cachechanged:
        with open(apkcachefile, 'wb') as cf:
            pickle.dump(apkcache, cf)

    # Update the wiki...
    if options.wiki:
        update_wiki(apps, apks + archapks)

    print "Finished."
Example #55
0
def main():

    global config, options

    # Parse command line...
    parser = OptionParser(usage="Usage: %prog [options] [APPID [APPID ...]]")
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-q", "--quiet", action="store_true", default=False,
                      help="Restrict output to warnings and errors")
    parser.add_option("--auto", action="store_true", default=False,
                      help="Process auto-updates")
    parser.add_option("--autoonly", action="store_true", default=False,
                      help="Only process apps with auto-updates")
    parser.add_option("--commit", action="store_true", default=False,
                      help="Commit changes")
    parser.add_option("--gplay", action="store_true", default=False,
                      help="Only print differences with the Play Store")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    # Get all apps...
    allapps = metadata.read_metadata()

    apps = common.read_app_args(args, allapps, False)

    if options.gplay:
        for app in apps:
            version, reason = check_gplay(app)
            if version is None:
                if reason == '404':
                    logging.info("{0} is not in the Play Store".format(common.getappname(app)))
                else:
                    logging.info("{0} encountered a problem: {1}".format(common.getappname(app), reason))
            if version is not None:
                stored = app['Current Version']
                if not stored:
                    logging.info("{0} has no Current Version but has version {1} on the Play Store"
                                 .format(common.getappname(app), version))
                elif LooseVersion(stored) < LooseVersion(version):
                    logging.info("{0} has version {1} on the Play Store, which is bigger than {2}"
                                 .format(common.getappname(app), version, stored))
                else:
                    if stored != version:
                        logging.info("{0} has version {1} on the Play Store, which differs from {2}"
                                     .format(common.getappname(app), version, stored))
                    else:
                        logging.info("{0} has the same version {1} on the Play Store"
                                     .format(common.getappname(app), version))
        return

    for appid, app in apps.iteritems():

        if options.autoonly and app['Auto Update Mode'] in ('None', 'Static'):
            logging.debug("Nothing to do for {0}...".format(appid))
            continue

        logging.info("Processing " + appid + '...')

        checkupdates_app(app)

    logging.info("Finished.")
Example #56
0
def main():

    global config, options, curid, count
    curid = None

    count = Counter()

    def warn(message):
        global curid, count
        if curid:
            print "%s:" % curid
            curid = None
            count["app"] += 1
        print "    %s" % message
        count["warn"] += 1

    # Parse command line...
    parser = OptionParser(usage="Usage: %prog [options] [APPID [APPID ...]]")
    parser.add_option(
        "-v", "--verbose", action="store_true", default=False, help="Spew out even more information than normal"
    )
    parser.add_option(
        "-q", "--quiet", action="store_true", default=False, help="Restrict output to warnings and errors"
    )
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    # Get all apps...
    allapps = metadata.read_metadata(xref=False)
    apps = common.read_app_args(args, allapps, False)

    filling_ucms = re.compile("^(Tags.*|RepoManifest.*)")
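    # ('Tags' and 'RepoManifest' Update Check Modes fill in the Auto Name /
    # Current Version fields automatically, hence "filling" UCMs.)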

    for appid, app in apps.iteritems():
        if app["Disabled"]:
            continue

        curid = appid
        count["app_total"] += 1

        # enabled_builds = 0
        lowest_vercode = -1
        curbuild = None
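        # Find the lowest version code among enabled builds, and the build
        # with the highest version code overall (the current build).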
        for build in app["builds"]:
            if not build["disable"]:
                # enabled_builds += 1
                vercode = int(build["vercode"])
                if lowest_vercode == -1 or vercode < lowest_vercode:
                    lowest_vercode = vercode
            if not curbuild or int(build["vercode"]) > int(curbuild["vercode"]):
                curbuild = build

        # Incorrect UCM
        if (
            curbuild
            and curbuild["commit"]
            and app["Update Check Mode"] == "RepoManifest"
            and not curbuild["commit"].startswith("unknown")
            and curbuild["vercode"] == app["Current Version Code"]
            and not curbuild["forcevercode"]
            and any(s in curbuild["commit"] for s in ".,_-/")
        ):
            warn(
                "Last used commit '%s' looks like a tag, but Update Check Mode is '%s'"
                % (curbuild["commit"], app["Update Check Mode"])
            )

        # Summary size limit
        summ_chars = len(app["Summary"])
        if summ_chars > config["char_limits"]["Summary"]:
            warn("Summary of length %s is over the %i char limit" % (summ_chars, config["char_limits"]["Summary"]))

        # Redundant info
        if app["Web Site"] and app["Source Code"]:
            if app["Web Site"].lower() == app["Source Code"].lower():
                warn("Website '%s' is just the app's source code link" % app["Web Site"])

        if filling_ucms.match(app["Update Check Mode"]):
            if all(
                app[f] == metadata.app_defaults[f] for f in ["Auto Name", "Current Version", "Current Version Code"]
            ):
                warn("UCM is set but it looks like checkupdates hasn't been run yet")

        if app["Update Check Name"] == appid:
            warn("Update Check Name is set to the known app id - it can be removed")

        cvc = int(app["Current Version Code"])
        if cvc > 0 and cvc < lowest_vercode:
            warn("Current Version Code is lower than any enabled build")

        # Missing or incorrect categories
        if not app["Categories"]:
            warn("Categories are not set")
        for categ in app["Categories"]:
            if categ not in categories:
                warn("Category '%s' is not valid" % categ)

        if app["Name"] and app["Name"] == app["Auto Name"]:
            warn("Name '%s' is just the auto name" % app["Name"])

        name = app["Name"] or app["Auto Name"]
        if app["Summary"] and name:
            if app["Summary"].lower() == name.lower():
                warn("Summary '%s' is just the app's name" % app["Summary"])

        desc = app["Description"]
        if app["Summary"] and desc and len(desc) == 1:
            if app["Summary"].lower() == desc[0].lower():
                warn("Description '%s' is just the app's summary" % app["Summary"])

        # Description size limit
        desc_charcount = sum(len(l) for l in desc)
        if desc_charcount > config["char_limits"]["Description"]:
            warn(
                "Description of length %s is over the %i char limit"
                % (desc_charcount, config["char_limits"]["Description"])
            )

        if not desc[0] or not desc[-1] or any(not desc[l - 1] and not desc[l] for l in range(1, len(desc))):
            warn("Description has an extra empty line")

        # Check for lists using the wrong characters
        validchars = ["*", "#"]
        lchar = ""
        lcount = 0
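        # A run of more than three consecutive lines starting with the same
        # character is treated as a list; only '*' and '#' are valid bullets.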
        for l in app["Description"]:
            if len(l) < 1:
                continue

            for um in desc_url.finditer(l):
                url = um.group(1)
                for m, r in http_warnings:
                    if m.match(url):
                        warn("URL '%s' in Description: %s" % (url, r))

            c = l.decode("utf-8")[0]
            if c == lchar:
                lcount += 1
                if lcount > 3 and lchar not in validchars:
                    warn("Description has a list (%s) but it isn't bulleted (*) nor numbered (#)" % lchar)
                    break
            else:
                lchar = c
                lcount = 1

        # Regex checks in all kinds of fields
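        # (regex_warnings is a module-level table assumed to map field names
        # to lists of (compiled_regex, reason) pairs, as the loop below uses.)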
        for f in regex_warnings:
            for m, r in regex_warnings[f]:
                v = app[f]
                if v is None:
                    continue
                if type(v) == str:
                    if m.match(v):
                        warn("%s '%s': %s" % (f, v, r))
                elif type(v) == list:
                    for l in v:
                        if m.match(l):
                            warn("%s at line '%s': %s" % (f, l, r))

        # Build warnings
        for build in app["builds"]:
            if build["disable"]:
                continue
            for s in ["master", "origin", "HEAD", "default", "trunk"]:
                if build["commit"] and build["commit"].startswith(s):
                    warn("Branch '%s' used as commit in build '%s'" % (s, build["version"]))
                for srclib in build["srclibs"]:
                    ref = srclib.split("@")[1].split("/")[0]
                    if ref.startswith(s):
                        warn("Branch '%s' used as commit in srclib '%s'" % (s, srclib))

        if not curid:
            print

    logging.info(
        "Found a total of %i warnings in %i apps out of %i total." % (count["warn"], count["app"], count["app_total"])
    )

    sys.exit(1 if count["warn"] > 0 else 0)
Example #57
def main():

    global config, options

    # Parse command line...
    parser = OptionParser()
    parser.add_option("-c",
                      "--create-metadata",
                      action="store_true",
                      default=False,
                      help="Create skeleton metadata files that are missing")
    parser.add_option("--delete-unknown",
                      action="store_true",
                      default=False,
                      help="Delete APKs without metadata from the repo")
    parser.add_option("-v",
                      "--verbose",
                      action="store_true",
                      default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-q",
                      "--quiet",
                      action="store_true",
                      default=False,
                      help="Restrict output to warnings and errors")
    parser.add_option("-b",
                      "--buildreport",
                      action="store_true",
                      default=False,
                      help="Report on build data status")
    parser.add_option(
        "-i",
        "--interactive",
        default=False,
        action="store_true",
        help="Interactively ask about things that need updating.")
    parser.add_option(
        "-I",
        "--icons",
        action="store_true",
        default=False,
        help="Resize all the icons exceeding the max pixel size and exit")
    parser.add_option(
        "-e",
        "--editor",
        default="/etc/alternatives/editor",
        help="Specify editor to use in interactive mode. Default " +
        "is /etc/alternatives/editor")
    parser.add_option("-w",
                      "--wiki",
                      default=False,
                      action="store_true",
                      help="Update the wiki")
    parser.add_option("",
                      "--pretty",
                      action="store_true",
                      default=False,
                      help="Produce human-readable index.xml")
    parser.add_option(
        "--clean",
        action="store_true",
        default=False,
        help="Clean update - don't uses caches, reprocess all apks")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    repodirs = ['repo']
    if config['archive_older'] != 0:
        repodirs.append('archive')
        if not os.path.exists('archive'):
            os.mkdir('archive')

    if options.icons:
        resize_all_icons(repodirs)
        sys.exit(0)

    # check that icons exist now, rather than fail at the end of `fdroid update`
    for k in ['repo_icon', 'archive_icon']:
        if k in config:
            if not os.path.exists(config[k]):
                logging.critical(k + ' "' + config[k] +
                                 '" does not exist! Correct it in config.py.')
                sys.exit(1)

    # Get all apps...
    apps = metadata.read_metadata()

    # Generate a list of categories...
    categories = set()
    for app in apps.itervalues():
        categories.update(app['Categories'])

    # Read known apks data (will be updated and written back when we've finished)
    knownapks = common.KnownApks()

    # Gather information about all the apk files in the repo directory, using
    # cached data if possible.
    apkcachefile = os.path.join('tmp', 'apkcache')
    if not options.clean and os.path.exists(apkcachefile):
        with open(apkcachefile, 'rb') as cf:
            apkcache = pickle.load(cf)
    else:
        apkcache = {}
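    # apkcache maps apk filenames to the data previously extracted from them,
    # and is persisted across runs with pickle.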
    cachechanged = False

    delete_disabled_builds(apps, apkcache, repodirs)
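    # (delete_disabled_builds drops apks belonging to builds that are now
    # marked disabled in the metadata, removing them from the cache too.)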

    # Scan all apks in the main repo
    apks, cc = scan_apks(apps, apkcache, repodirs[0], knownapks)
    if cc:
        cachechanged = True

    # Generate warnings for apks with no metadata (or create skeleton
    # metadata files, if requested on the command line)
    newmetadata = False
    for apk in apks:
        if apk['id'] not in apps:
            if options.create_metadata:
                if 'name' not in apk:
                    logging.error(apk['id'] +
                                  ' does not have a name! Skipping...')
                    continue
                with open(os.path.join('metadata', apk['id'] + '.txt'), 'w') as f:
                    f.write("License:Unknown\n")
                    f.write("Web Site:\n")
                    f.write("Source Code:\n")
                    f.write("Issue Tracker:\n")
                    f.write("Summary:" + apk['name'] + "\n")
                    f.write("Description:\n")
                    f.write(apk['name'] + "\n")
                    f.write(".\n")
                logging.info("Generated skeleton metadata for " + apk['id'])
                newmetadata = True
            else:
                msg = apk['apkname'] + " (" + apk['id'] + ") has no metadata!"
                if options.delete_unknown:
                    logging.warn(msg + "\n\tdeleting: repo/" + apk['apkname'])
                    rmf = os.path.join(repodirs[0], apk['apkname'])
                    if not os.path.exists(rmf):
                        logging.error(
                            "Could not find {0} to remove it".format(rmf))
                    else:
                        os.remove(rmf)
                else:
                    logging.warn(msg +
                                 "\n\tUse `fdroid update -c` to create it.")

    # update the metadata with the newly created ones included
    if newmetadata:
        apps = metadata.read_metadata()

    # Scan the archive repo for apks as well
    if len(repodirs) > 1:
        archapks, cc = scan_apks(apps, apkcache, repodirs[1], knownapks)
        if cc:
            cachechanged = True
    else:
        archapks = []

    # Some information from the apks needs to be applied up to the application
    # level. When doing this, we use the info from the most recent version's apk.
    # We deal with figuring out when the app was added and last updated at the
    # same time.
    for appid, app in apps.iteritems():
        bestver = 0
        added = None
        lastupdated = None
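        # Use the apk with the highest version code for app-level info, and
        # derive added/lastupdated from the apks' 'added' timestamps.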
        for apk in apks + archapks:
            if apk['id'] == appid:
                if apk['versioncode'] > bestver:
                    bestver = apk['versioncode']
                    bestapk = apk

                if 'added' in apk:
                    if not added or apk['added'] < added:
                        added = apk['added']
                    if not lastupdated or apk['added'] > lastupdated:
                        lastupdated = apk['added']

        if added:
            app['added'] = added
        else:
            logging.warn("Don't know when " + appid + " was added")
        if lastupdated:
            app['lastupdated'] = lastupdated
        else:
            logging.warn("Don't know when " + appid + " was last updated")

        if bestver == 0:
            if app['Name'] is None:
                app['Name'] = appid
            app['icon'] = None
            logging.warn("Application " + appid + " has no packages")
        else:
            if app['Name'] is None:
                app['Name'] = bestapk['name']
            app['icon'] = bestapk['icon'] if 'icon' in bestapk else None

    # Sort the app list by name, so the web site doesn't have to sort it by
    # default. (We had to wait until we'd scanned the apks to do this, because
    # the name mostly comes from there!)
    sortedids = sorted(apps.iterkeys(),
                       key=lambda appid: apps[appid]['Name'].upper())

    if len(repodirs) > 1:
        archive_old_apks(apps, apks, archapks, repodirs[0], repodirs[1],
                         config['archive_older'])

    # Make the index for the main repo...
    make_index(apps, sortedids, apks, repodirs[0], False, categories)

    # If there's an archive repo, make the index for it. We already scanned it
    # earlier on.
    if len(repodirs) > 1:
        make_index(apps, sortedids, archapks, repodirs[1], True, categories)

    if config['update_stats']:

        # Update known apks info...
        knownapks.writeifchanged()

        # Generate latest apps data for widget
        if os.path.exists(os.path.join('stats', 'latestapps.txt')):
            data = ''
            for line in open(os.path.join('stats', 'latestapps.txt')):
                appid = line.rstrip()
                data += appid + "\t"
                app = apps[appid]
                data += app['Name'] + "\t"
                if app['icon'] is not None:
                    data += app['icon'] + "\t"
                data += app['License'] + "\n"
            with open(os.path.join(repodirs[0], 'latestapps.dat'), 'w') as f:
                f.write(data)

    if cachechanged:
        with open(apkcachefile, 'wb') as cf:
            pickle.dump(apkcache, cf)

    # Update the wiki...
    if options.wiki:
        update_wiki(apps, sortedids, apks + archapks)

    logging.info("Finished.")