Example #1
def main():

    parser = ArgumentParser(usage="%(prog)s")
    common.setup_global_opts(parser)
    parser.parse_args()
    common.read_config(None)

    metadata.read_metadata(xref=True)
Example #2
def main():

    global config, options

    anywarns = False

    # Parse command line...
    parser = ArgumentParser(usage="%(prog)s [options] [APPID [APPID ...]]")
    common.setup_global_opts(parser)
    parser.add_argument("-f", "--format", action="store_true", default=False,
                        help="Also warn about formatting issues, like rewritemeta -l")
    parser.add_argument("appid", nargs='*', help="app-id in the form APPID")
    options = parser.parse_args()

    config = common.read_config(options)

    # Get all apps...
    allapps = metadata.read_metadata(xref=True)
    apps = common.read_app_args(options.appid, allapps, False)

    for appid, app in apps.iteritems():
        if app.Disabled:
            continue

        warns = []

        for check_func in [
                check_regexes,
                check_ucm_tags,
                check_char_limits,
                check_old_links,
                check_checkupdates_ran,
                check_useless_fields,
                check_empty_fields,
                check_categories,
                check_duplicates,
                check_mediawiki_links,
                check_bulleted_lists,
                check_builds,
                ]:
            warns += check_func(app)

        if options.format:
            if not rewritemeta.proper_format(app):
                warns.append("Run rewritemeta to fix formatting")

        if warns:
            anywarns = True
            for warn in warns:
                print("%s: %s" % (appid, warn))

    if anywarns:
        sys.exit(1)
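
Each check_* function above follows the same contract: it takes an app object and returns a list of warning strings, which the loop concatenates into warns. A minimal sketch of a check in that shape (the WebSite field and the specific rule are assumptions for illustration, not taken from the real checkers):

def check_example_links(app):
    # Hypothetical check in the same shape as the check_* functions above:
    # inspect one metadata field, return human-readable warning strings.
    warns = []
    site = getattr(app, 'WebSite', '') or ''
    if site.startswith('http://'):
        warns.append("Web Site: consider https:// instead of http://")
    return warns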
Example #3
def main():

    global config, options

    # Parse command line...
    parser = ArgumentParser(usage="%(prog)s [options] [APPID [APPID ...]]")
    common.setup_global_opts(parser)
    parser.add_argument("-l", "--list", action="store_true", default=False,
                        help="List files that would be reformatted")
    parser.add_argument("-t", "--to", default=None,
                        help="Rewrite to a specific format")
    parser.add_argument("appid", nargs='*', help="app-id in the form APPID")
    options = parser.parse_args()

    config = common.read_config(options)

    # Get all apps...
    allapps = metadata.read_metadata(xref=True)
    apps = common.read_app_args(options.appid, allapps, False)

    if options.list and options.to is not None:
        parser.error("Cannot use --list and --to at the same time")

    supported = ['txt', 'yaml']

    if options.to is not None and options.to not in supported:
        parser.error("Must give a valid format to --to")

    for appid, app in apps.iteritems():
        base, ext = common.get_extension(app.metadatapath)
        if not options.to and ext not in supported:
            logging.info("Ignoring %s file at '%s'" % (ext, app.metadatapath))
            continue

        to_ext = ext
        if options.to is not None:
            to_ext = options.to

        if options.list:
            if not proper_format(app):
                print(app.metadatapath)
            continue

        with open(base + '.' + to_ext, 'w') as f:
            metadata.write_metadata(to_ext, f, app)

        if ext != to_ext:
            os.remove(app.metadatapath)

    logging.debug("Finished.")
Example #4
def main():

    global config, options

    # Parse command line...
    parser = ArgumentParser(usage="%(prog)s [options]")
    common.setup_global_opts(parser)
    options = parser.parse_args()

    config = common.read_config(options)

    if 'jarsigner' not in config:
        logging.critical('Java jarsigner not found! Install in standard location or set java_paths!')
        sys.exit(1)

    repodirs = ['repo']
    if config['archive_older'] != 0:
        repodirs.append('archive')

    signed = 0
    for output_dir in repodirs:
        if not os.path.isdir(output_dir):
            logging.error("Missing output directory '" + output_dir + "'")
            sys.exit(1)

        unsigned = os.path.join(output_dir, 'index_unsigned.jar')
        if os.path.exists(unsigned):

            args = [config['jarsigner'], '-keystore', config['keystore'],
                    '-storepass:file', config['keystorepassfile'],
                    '-digestalg', 'SHA1', '-sigalg', 'SHA1withRSA',
                    unsigned, config['repo_keyalias']]
            if config['keystore'] == 'NONE':
                args += config['smartcardoptions']
            else:  # smartcards never use -keypass
                args += ['-keypass:file', config['keypassfile']]
            p = FDroidPopen(args)
            if p.returncode != 0:
                logging.critical("Failed to sign index")
                sys.exit(1)
            os.rename(unsigned, os.path.join(output_dir, 'index.jar'))
            logging.info('Signed index in ' + output_dir)
            signed += 1

    if signed == 0:
        logging.info("Nothing to do")
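
A signed index can be sanity-checked afterwards. A small sketch (not part of the original script) that shells out to jarsigner -verify, assuming jarsigner is on the PATH:

import os
import subprocess

def index_signature_ok(output_dir):
    # "jarsigner -verify" exits 0 if the jar carries a valid signature.
    index_jar = os.path.join(output_dir, 'index.jar')
    return subprocess.call(['jarsigner', '-verify', index_jar]) == 0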
Example #5
def main():

    global config, options

    # Parse command line...
    parser = ArgumentParser(usage="%(prog)s [options]")
    common.setup_global_opts(parser)
    options = parser.parse_args()

    config = common.read_config(options)

    repodirs = ['repo']
    if config['archive_older'] != 0:
        repodirs.append('archive')

    for output_dir in repodirs:
        if not os.path.isdir(output_dir):
            logging.error("Missing output directory '" + output_dir + "'")
            sys.exit(1)

        # Process any apks that are waiting to be signed...
        for apkfile in sorted(glob.glob(os.path.join(output_dir, '*.apk'))):

            apkfilename = os.path.basename(apkfile)
            sigfilename = apkfilename + ".asc"
            sigpath = os.path.join(output_dir, sigfilename)

            if not os.path.exists(sigpath):
                gpgargs = ['gpg', '-a',
                           '--output', sigpath,
                           '--detach-sig']
                if 'gpghome' in config:
                    gpgargs.extend(['--homedir', config['gpghome']])
                if 'gpgkey' in config:
                    gpgargs.extend(['--local-user', config['gpgkey']])
                gpgargs.append(os.path.join(output_dir, apkfilename))
                p = FDroidPopen(gpgargs)
                if p.returncode != 0:
                    logging.error("Signing failed.")
                    sys.exit(1)

                logging.info('Signed ' + apkfilename)
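
The detached .asc files written above can be checked independently of F-Droid. A small sketch, assuming gpg is installed and the signing key is present in the local keyring:

import subprocess

def detached_sig_ok(apkpath):
    # gpg exits non-zero if the detached signature does not match the apk.
    return subprocess.call(['gpg', '--verify', apkpath + '.asc', apkpath]) == 0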
Example #6
def parse_commandline():
    """Parse the command line. Returns options, parser."""

    parser = ArgumentParser(usage="%(prog)s [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]")
    common.setup_global_opts(parser)
    parser.add_argument("appid", nargs='*', help="app-id with optional versioncode in the form APPID[:VERCODE]")
    parser.add_argument("-l", "--latest", action="store_true", default=False,
                        help="Build only the latest version of each package")
    parser.add_argument("-s", "--stop", action="store_true", default=False,
                        help="Make the build stop on exceptions")
    parser.add_argument("-t", "--test", action="store_true", default=False,
                        help="Test mode - put output in the tmp directory only, and always build, even if the output already exists.")
    parser.add_argument("--server", action="store_true", default=False,
                        help="Use build server")
    parser.add_argument("--resetserver", action="store_true", default=False,
                        help="Reset and create a brand new build server, even if the existing one appears to be ok.")
    parser.add_argument("--on-server", dest="onserver", action="store_true", default=False,
                        help="Specify that we're running on the build server")
    parser.add_argument("--skip-scan", dest="skipscan", action="store_true", default=False,
                        help="Skip scanning the source code for binaries and other problems")
    parser.add_argument("--no-tarball", dest="notarball", action="store_true", default=False,
                        help="Don't create a source tarball, useful when testing a build")
    parser.add_argument("--no-refresh", dest="refresh", action="store_false", default=True,
                        help="Don't refresh the repository, useful when testing a build with no internet connection")
    parser.add_argument("-f", "--force", action="store_true", default=False,
                        help="Force build of disabled apps, and carries on regardless of scan problems. Only allowed in test mode.")
    parser.add_argument("-a", "--all", action="store_true", default=False,
                        help="Build all applications available")
    parser.add_argument("-w", "--wiki", default=False, action="store_true",
                        help="Update the wiki")
    options = parser.parse_args()

    # Force --stop with --on-server to get correct exit code
    if options.onserver:
        options.stop = True

    if options.force and not options.test:
        parser.error("option %s: Force is only allowed in test mode" % "force")

    return options, parser
Example #7
def main():

    global config, options

    # Parse command line...
    parser = ArgumentParser(usage="%(prog)s [options]")
    common.setup_global_opts(parser)
    options = parser.parse_args()

    config = common.read_config(options)

    repodirs = ["repo"]
    if config["archive_older"] != 0:
        repodirs.append("archive")

    for output_dir in repodirs:
        if not os.path.isdir(output_dir):
            logging.error("Missing output directory '" + output_dir + "'")
            sys.exit(1)

        # Process any apks that are waiting to be signed...
        for apkfile in sorted(glob.glob(os.path.join(output_dir, "*.apk"))):

            apkfilename = os.path.basename(apkfile)
            sigfilename = apkfilename + ".asc"
            sigpath = os.path.join(output_dir, sigfilename)

            if not os.path.exists(sigpath):
                gpgargs = ["gpg", "-a", "--output", sigpath, "--detach-sig"]
                if "gpghome" in config:
                    gpgargs.extend(["--homedir", config["gpghome"]])
                gpgargs.append(os.path.join(output_dir, apkfilename))
                p = FDroidPopen(gpgargs)
                if p.returncode != 0:
                    logging.error("Signing failed.")
                    sys.exit(1)

                logging.info("Signed " + apkfilename)
Example #8
def main():

    global options, config

    # Parse command line...
    parser = ArgumentParser(usage="%(prog)s [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]")
    common.setup_global_opts(parser)
    parser.add_argument("appid", nargs='*', help="app-id with optional versioncode in the form APPID[:VERCODE]")
    parser.add_argument("-a", "--all", action="store_true", default=False,
                        help="Install all signed applications available")
    options = parser.parse_args()

    if not options.appid and not options.all:
        parser.error("option %s: If you really want to install all the signed apps, use --all" % "all")

    config = common.read_config(options)

    output_dir = 'repo'
    if not os.path.isdir(output_dir):
        logging.info("No signed output directory - nothing to do")
        sys.exit(0)

    if options.appid:

        vercodes = common.read_pkg_args(options.appid, True)
        apks = {appid: None for appid in vercodes}

        # Get the signed apk with the highest vercode
        for apkfile in sorted(glob.glob(os.path.join(output_dir, '*.apk'))):

            try:
                appid, vercode = common.apknameinfo(apkfile)
            except FDroidException:
                continue
            if appid not in apks:
                continue
            if vercodes[appid] and vercode not in vercodes[appid]:
                continue
            apks[appid] = apkfile

        for appid, apk in apks.iteritems():
            if not apk:
                raise FDroidException("No signed apk available for %s" % appid)

    else:

        apks = {common.apknameinfo(apkfile)[0]: apkfile for apkfile in
                sorted(glob.glob(os.path.join(output_dir, '*.apk')))}

    for appid, apk in apks.iteritems():
        # Get device list each time to avoid device not found errors
        devs = devices()
        if not devs:
            raise FDroidException("No attached devices found")
        logging.info("Installing %s..." % apk)
        for dev in devs:
            logging.info("Installing %s on %s..." % (apk, dev))
            p = SdkToolsPopen(['adb', "-s", dev, "install", apk])
            fail = ""
            for line in p.output.splitlines():
                if line.startswith("Failure"):
                    fail = line[9:-1]
            if not fail:
                continue

            if fail == "INSTALL_FAILED_ALREADY_EXISTS":
                logging.warn("%s is already installed on %s." % (apk, dev))
            else:
                raise FDroidException("Failed to install %s on %s: %s" % (
                    apk, dev, fail))

    logging.info("\nFinished")
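
devices() here is an fdroidserver helper; roughly, it has to return the serials of attached devices. One way such a helper could be built on top of `adb devices` output (an assumption for illustration, not the project's actual implementation):

import subprocess

def list_adb_devices():
    # Parse "adb devices": skip the header line, keep serials in state "device".
    out = subprocess.check_output(['adb', 'devices']).decode('utf-8')
    devs = []
    for line in out.splitlines()[1:]:
        parts = line.split()
        if len(parts) == 2 and parts[1] == 'device':
            devs.append(parts[0])
    return devs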
Example #9
def main():

    global options, config

    # Parse command line...
    parser = ArgumentParser()
    common.setup_global_opts(parser)
    parser.add_argument("-d", "--download", action="store_true", default=False,
                        help="Download logs we don't have")
    parser.add_argument("--recalc", action="store_true", default=False,
                        help="Recalculate aggregate stats - use when changes "
                        "have been made that would invalidate old cached data.")
    parser.add_argument("--nologs", action="store_true", default=False,
                        help="Don't do anything logs-related")
    options = parser.parse_args()

    config = common.read_config(options)

    if not config['update_stats']:
        logging.info("Stats are disabled - set \"update_stats = True\" in your config.py")
        sys.exit(1)

    # Get all metadata-defined apps...
    allmetaapps = [app for app in metadata.read_metadata().itervalues()]
    metaapps = [app for app in allmetaapps if not app.Disabled]

    statsdir = 'stats'
    logsdir = os.path.join(statsdir, 'logs')
    datadir = os.path.join(statsdir, 'data')
    if not os.path.exists(statsdir):
        os.mkdir(statsdir)
    if not os.path.exists(logsdir):
        os.mkdir(logsdir)
    if not os.path.exists(datadir):
        os.mkdir(datadir)

    if options.download:
        # Get any access logs we don't have...
        ssh = None
        ftp = None
        try:
            logging.info('Retrieving logs')
            ssh = paramiko.SSHClient()
            ssh.load_system_host_keys()
            ssh.connect(config['stats_server'], username=config['stats_user'],
                        timeout=10, key_filename=config['webserver_keyfile'])
            ftp = ssh.open_sftp()
            ftp.get_channel().settimeout(60)
            logging.info("...connected")

            ftp.chdir('logs')
            files = ftp.listdir()
            for f in files:
                if f.startswith('access-') and f.endswith('.log.gz'):

                    destpath = os.path.join(logsdir, f)
                    destsize = ftp.stat(f).st_size
                    if (not os.path.exists(destpath) or
                            os.path.getsize(destpath) != destsize):
                        logging.debug("...retrieving " + f)
                        ftp.get(f, destpath)
        except Exception:
            traceback.print_exc()
            sys.exit(1)
        finally:
            # Disconnect
            if ftp is not None:
                ftp.close()
            if ssh is not None:
                ssh.close()

    knownapks = common.KnownApks()
    unknownapks = []

    if not options.nologs:
        # Process logs
        logging.info('Processing logs...')
        appscount = Counter()
        appsvercount = Counter()
        logexpr = r'(?P<ip>[.:0-9a-fA-F]+) - - \[(?P<time>.*?)\] ' + \
            r'"GET (?P<uri>.*?) HTTP/1.\d" (?P<statuscode>\d+) ' + \
            r'\d+ "(?P<referral>.*?)" "(?P<useragent>.*?)"'
        logsearch = re.compile(logexpr).search
        for logfile in glob.glob(os.path.join(logsdir, 'access-*.log.gz')):
            logging.debug('...' + logfile)

            # Get the date for this log - e.g. 2012-02-28
            thisdate = os.path.basename(logfile)[7:-7]

            agg_path = os.path.join(datadir, thisdate + '.json')
            if not options.recalc and os.path.exists(agg_path):
                # Use previously calculated aggregate data
                with open(agg_path, 'r') as f:
                    today = json.load(f)

            else:
                # Calculate from logs...

                today = {
                    'apps': Counter(),
                    'appsver': Counter(),
                    'unknown': []
                }

                p = subprocess.Popen(["zcat", logfile], stdout=subprocess.PIPE)
                matches = (logsearch(line) for line in p.stdout)
                for match in matches:
                    if not match:
                        continue
                    if match.group('statuscode') != '200':
                        continue
                    if match.group('ip') in config['stats_ignore']:
                        continue
                    uri = match.group('uri')
                    if not uri.endswith('.apk'):
                        continue
                    _, apkname = os.path.split(uri)
                    app = knownapks.getapp(apkname)
                    if app:
                        appid, _ = app
                        today['apps'][appid] += 1
                        # Strip the '.apk' from apkname
                        appver = apkname[:-4]
                        today['appsver'][appver] += 1
                    else:
                        if apkname not in today['unknown']:
                            today['unknown'].append(apkname)

                # Save calculated aggregate data for today to cache
                with open(agg_path, 'w') as f:
                    json.dump(today, f)

            # Add today's stats (whether cached or recalculated) to the total
            for appid in today['apps']:
                appscount[appid] += today['apps'][appid]
            for appid in today['appsver']:
                appsvercount[appid] += today['appsver'][appid]
            for uk in today['unknown']:
                if uk not in unknownapks:
                    unknownapks.append(uk)

        # Calculate and write stats for total downloads...
        lst = []
        alldownloads = 0
        for appid in appscount:
            count = appscount[appid]
            lst.append(appid + " " + str(count))
            if config['stats_to_carbon']:
                carbon_send('fdroid.download.' + appid.replace('.', '_'),
                            count)
            alldownloads += count
        lst.append("ALL " + str(alldownloads))
        with open(os.path.join(statsdir, 'total_downloads_app.txt'), 'w') as f:
            f.write('# Total downloads by application, since October 2011\n')
            for line in sorted(lst):
                f.write(line + '\n')

        lst = []
        for appver in appsvercount:
            count = appsvercount[appver]
            lst.append(appver + " " + str(count))

        with open(os.path.join(statsdir, 'total_downloads_app_version.txt'), 'w') as f:
            f.write('# Total downloads by application and version, '
                    'since October 2011\n')
            for line in sorted(lst):
                f.write(line + "\n")

    # Calculate and write stats for repo types...
    logging.info("Processing repo types...")
    repotypes = Counter()
    for app in metaapps:
        rtype = app.RepoType or 'none'
        if rtype == 'srclib':
            rtype = common.getsrclibvcs(app.Repo)
        repotypes[rtype] += 1
    with open(os.path.join(statsdir, 'repotypes.txt'), 'w') as f:
        for rtype, count in most_common_stable(repotypes):
            f.write(rtype + ' ' + str(count) + '\n')

    # Calculate and write stats for update check modes...
    logging.info("Processing update check modes...")
    ucms = Counter()
    for app in metaapps:
        checkmode = app.UpdateCheckMode
        if checkmode.startswith('RepoManifest/'):
            checkmode = checkmode[:12]
        if checkmode.startswith('Tags '):
            checkmode = checkmode[:4]
        ucms[checkmode] += 1
    with open(os.path.join(statsdir, 'update_check_modes.txt'), 'w') as f:
        for checkmode, count in most_common_stable(ucms):
            f.write(checkmode + ' ' + str(count) + '\n')

    logging.info("Processing categories...")
    ctgs = Counter()
    for app in metaapps:
        for category in app.Categories:
            ctgs[category] += 1
    with open(os.path.join(statsdir, 'categories.txt'), 'w') as f:
        for category, count in most_common_stable(ctgs):
            f.write(category + ' ' + str(count) + '\n')

    logging.info("Processing antifeatures...")
    afs = Counter()
    for app in metaapps:
        if app.AntiFeatures is None:
            continue
        for antifeature in app.AntiFeatures:
            afs[antifeature] += 1
    with open(os.path.join(statsdir, 'antifeatures.txt'), 'w') as f:
        for antifeature, count in most_common_stable(afs):
            f.write(antifeature + ' ' + str(count) + '\n')

    # Calculate and write stats for licenses...
    logging.info("Processing licenses...")
    licenses = Counter()
    for app in metaapps:
        license = app.License
        licenses[license] += 1
    with open(os.path.join(statsdir, 'licenses.txt'), 'w') as f:
        for license, count in most_common_stable(licenses):
            f.write(license + ' ' + str(count) + '\n')

    # Write list of disabled apps...
    logging.info("Processing disabled apps...")
    disabled = [app.id for app in allmetaapps if app.Disabled]
    with open(os.path.join(statsdir, 'disabled_apps.txt'), 'w') as f:
        for appid in sorted(disabled):
            f.write(appid + '\n')

    # Write list of latest apps added to the repo...
    logging.info("Processing latest apps...")
    latest = knownapks.getlatest(10)
    with open(os.path.join(statsdir, 'latestapps.txt'), 'w') as f:
        for appid in latest:
            f.write(appid + '\n')

    if unknownapks:
        logging.info('\nUnknown apks:')
        for apk in unknownapks:
            logging.info(apk)

    logging.info("Finished.")
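
The logexpr pattern expects access-log lines in the usual combined format. A quick check of the named groups against a made-up sample line (illustrative data only):

import re

logexpr = (r'(?P<ip>[.:0-9a-fA-F]+) - - \[(?P<time>.*?)\] '
           r'"GET (?P<uri>.*?) HTTP/1.\d" (?P<statuscode>\d+) '
           r'\d+ "(?P<referral>.*?)" "(?P<useragent>.*?)"')
sample = ('203.0.113.7 - - [28/Feb/2012:10:00:00 +0000] '
          '"GET /repo/org.example.app_42.apk HTTP/1.1" 200 1234 "-" "ExampleAgent/1.0"')
m = re.compile(logexpr).search(sample)
if m:
    print(m.group('uri'), m.group('statuscode'))  # /repo/org.example.app_42.apk 200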
Example #10
def main():

    global config, options

    # Parse command line...
    parser = ArgumentParser(usage="%(prog)s [options] [APPID [APPID ...]]")
    common.setup_global_opts(parser)
    parser.add_argument("appid", nargs='*', help="app-id to check for updates")
    parser.add_argument("--auto", action="store_true", default=False,
                        help="Process auto-updates")
    parser.add_argument("--autoonly", action="store_true", default=False,
                        help="Only process apps with auto-updates")
    parser.add_argument("--commit", action="store_true", default=False,
                        help="Commit changes")
    parser.add_argument("--gplay", action="store_true", default=False,
                        help="Only print differences with the Play Store")
    options = parser.parse_args()

    config = common.read_config(options)

    # Get all apps...
    allapps = metadata.read_metadata()

    apps = common.read_app_args(options.appid, allapps, False)

    if options.gplay:
        for app in apps.itervalues():
            version, reason = check_gplay(app)
            if version is None:
                if reason == '404':
                    logging.info("{0} is not in the Play Store".format(common.getappname(app)))
                else:
                    logging.info("{0} encountered a problem: {1}".format(common.getappname(app), reason))
            if version is not None:
                stored = app.CurrentVersion
                if not stored:
                    logging.info("{0} has no Current Version but has version {1} on the Play Store"
                                 .format(common.getappname(app), version))
                elif LooseVersion(stored) < LooseVersion(version):
                    logging.info("{0} has version {1} on the Play Store, which is bigger than {2}"
                                 .format(common.getappname(app), version, stored))
                else:
                    if stored != version:
                        logging.info("{0} has version {1} on the Play Store, which differs from {2}"
                                     .format(common.getappname(app), version, stored))
                    else:
                        logging.info("{0} has the same version {1} on the Play Store"
                                     .format(common.getappname(app), version))
        return

    for appid, app in apps.iteritems():

        if options.autoonly and app.AutoUpdateMode in ('None', 'Static'):
            logging.debug("Nothing to do for {0}...".format(appid))
            continue

        logging.info("Processing " + appid + '...')

        checkupdates_app(app)

    logging.info("Finished.")
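
The Play Store comparison relies on distutils' LooseVersion, which orders dotted version strings by their numeric components rather than lexically. A quick illustration:

from distutils.version import LooseVersion

# '1.10' is newer than '1.9' under LooseVersion, unlike plain string comparison.
print(LooseVersion('1.9') < LooseVersion('1.10'))  # True
print('1.9' < '1.10')                              # False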
Example #11
def main():

    global config, options

    # Parse command line...
    parser = ArgumentParser(usage="%(prog)s [options] "
                            "[APPID[:VERCODE] [APPID[:VERCODE] ...]]")
    common.setup_global_opts(parser)
    parser.add_argument("appid", nargs='*', help="app-id with optional versioncode in the form APPID[:VERCODE]")
    options = parser.parse_args()

    config = common.read_config(options)

    log_dir = 'logs'
    if not os.path.isdir(log_dir):
        logging.info("Creating log directory")
        os.makedirs(log_dir)

    tmp_dir = 'tmp'
    if not os.path.isdir(tmp_dir):
        logging.info("Creating temporary directory")
        os.makedirs(tmp_dir)

    output_dir = 'repo'
    if not os.path.isdir(output_dir):
        logging.info("Creating output directory")
        os.makedirs(output_dir)

    unsigned_dir = 'unsigned'
    if not os.path.isdir(unsigned_dir):
        logging.warning("No unsigned directory - nothing to do")
        sys.exit(1)

    for f in [config['keystorepassfile'],
              config['keystore'],
              config['keypassfile']]:
        if not os.path.exists(f):
            logging.error("Config error - missing '{0}'".format(f))
            sys.exit(1)

    # It was suggested at
    #    https://dev.guardianproject.info/projects/bazaar/wiki/FDroid_Audit
    # that a package could be crafted, such that it would use the same signing
    # key as an existing app. While it may be theoretically possible for such a
    # colliding package ID to be generated, it seems virtually impossible that
    # the colliding ID would be something that would be a) a valid package ID,
    # and b) a sane-looking ID that would make its way into the repo.
    # Nonetheless, to be sure, before publishing we check that there are no
    # collisions, and refuse to do any publishing if that's the case...
    allapps = metadata.read_metadata()
    vercodes = common.read_pkg_args(options.appid, True)
    allaliases = []
    for appid in allapps:
        m = md5.new()
        m.update(appid)
        keyalias = m.hexdigest()[:8]
        if keyalias in allaliases:
            logging.error("There is a keyalias collision - publishing halted")
            sys.exit(1)
        allaliases.append(keyalias)
    logging.info("{0} apps, {1} key aliases".format(len(allapps),
                                                    len(allaliases)))

    # Process any apks that are waiting to be signed...
    for apkfile in sorted(glob.glob(os.path.join(unsigned_dir, '*.apk'))):

        appid, vercode = common.apknameinfo(apkfile)
        apkfilename = os.path.basename(apkfile)
        if vercodes and appid not in vercodes:
            continue
        if appid in vercodes and vercodes[appid]:
            if vercode not in vercodes[appid]:
                continue
        logging.info("Processing " + apkfile)

        # There ought to be valid metadata for this app, otherwise why are we
        # trying to publish it?
        if appid not in allapps:
            logging.error("Unexpected {0} found in unsigned directory"
                          .format(apkfilename))
            sys.exit(1)
        app = allapps[appid]

        if app.Binaries is not None:

            # It's an app where we build from source, and verify the apk
            # contents against a developer's binary, and then publish their
            # version if everything checks out.
            # The binary should already have been retrieved during the build
            # process.
            srcapk = apkfile + ".binary"

            # Compare our unsigned one with the downloaded one...
            compare_result = common.verify_apks(srcapk, apkfile, tmp_dir)
            if compare_result:
                logging.error("...verification failed - publish skipped : "
                              + compare_result)
                continue

            # Success! So move the downloaded file to the repo, and remove
            # our built version.
            shutil.move(srcapk, os.path.join(output_dir, apkfilename))
            os.remove(apkfile)

        else:

            # It's a 'normal' app, i.e. we sign and publish it...

            # Figure out the key alias name we'll use. Only the first 8
            # characters are significant, so we'll use the first 8 from
            # the MD5 of the app's ID and hope there are no collisions.
            # If a collision does occur later, we're going to have to
            # come up with a new algorithm, AND rename all existing keys
            # in the keystore!
            if appid in config['keyaliases']:
                # For this particular app, the key alias is overridden...
                keyalias = config['keyaliases'][appid]
                if keyalias.startswith('@'):
                    m = md5.new()
                    m.update(keyalias[1:])
                    keyalias = m.hexdigest()[:8]
            else:
                m = md5.new()
                m.update(appid)
                keyalias = m.hexdigest()[:8]
            logging.info("Key alias: " + keyalias)

            # See if we already have a key for this application, and
            # if not generate one...
            p = FDroidPopen(['keytool', '-list',
                             '-alias', keyalias, '-keystore', config['keystore'],
                             '-storepass:file', config['keystorepassfile']])
            if p.returncode != 0:
                logging.info("Key does not exist - generating...")
                p = FDroidPopen(['keytool', '-genkey',
                                 '-keystore', config['keystore'],
                                 '-alias', keyalias,
                                 '-keyalg', 'RSA', '-keysize', '2048',
                                 '-validity', '10000',
                                 '-storepass:file', config['keystorepassfile'],
                                 '-keypass:file', config['keypassfile'],
                                 '-dname', config['keydname']])
                # TODO keypass should be sent via stdin
                if p.returncode != 0:
                    raise BuildException("Failed to generate key")

            # Sign the application...
            p = FDroidPopen(['jarsigner', '-keystore', config['keystore'],
                             '-storepass:file', config['keystorepassfile'],
                             '-keypass:file', config['keypassfile'], '-sigalg',
                             'SHA1withRSA', '-digestalg', 'SHA1',
                             apkfile, keyalias])
            # TODO keypass should be sent via stdin
            if p.returncode != 0:
                raise BuildException("Failed to sign application")

            # Zipalign it...
            p = SdkToolsPopen(['zipalign', '-v', '4', apkfile,
                               os.path.join(output_dir, apkfilename)])
            if p.returncode != 0:
                raise BuildException("Failed to align application")
            os.remove(apkfile)

        # Move the source tarball into the output directory...
        tarfilename = apkfilename[:-4] + '_src.tar.gz'
        tarfile = os.path.join(unsigned_dir, tarfilename)
        if os.path.exists(tarfile):
            shutil.move(tarfile, os.path.join(output_dir, tarfilename))

        logging.info('Published ' + apkfilename)
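
The key alias derivation above is just the first eight hex digits of the MD5 of the application ID (or of the alias name after '@' when overridden). The script uses the legacy md5 module; the same derivation with hashlib, as a sketch of the logic only:

import hashlib

def key_alias_for(appid):
    # First 8 hex chars of the MD5 of the app id, matching the alias scheme above.
    return hashlib.md5(appid.encode('utf-8')).hexdigest()[:8]

print(key_alias_for('org.fdroid.fdroid'))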
Example #12
def main():

    global config, options

    # Parse command line...
    parser = ArgumentParser(usage="%(prog)s [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]")
    common.setup_global_opts(parser)
    parser.add_argument("appid", nargs='*', help="app-id with optional versioncode in the form APPID[:VERCODE]")
    options = parser.parse_args()

    config = common.read_config(options)

    # Read all app and srclib metadata
    allapps = metadata.read_metadata()
    apps = common.read_app_args(options.appid, allapps, True)

    probcount = 0

    build_dir = 'build'
    if not os.path.isdir(build_dir):
        logging.info("Creating build directory")
        os.makedirs(build_dir)
    srclib_dir = os.path.join(build_dir, 'srclib')
    extlib_dir = os.path.join(build_dir, 'extlib')

    for appid, app in apps.iteritems():

        if app.Disabled:
            logging.info("Skipping %s: disabled" % appid)
            continue
        if not app.builds:
            logging.info("Skipping %s: no builds specified" % appid)
            continue

        logging.info("Processing " + appid)

        try:

            if app.RepoType == 'srclib':
                build_dir = os.path.join('build', 'srclib', app.Repo)
            else:
                build_dir = os.path.join('build', appid)

            # Set up vcs interface and make sure we have the latest code...
            vcs = common.getvcs(app.RepoType, app.Repo, build_dir)

            for build in app.builds:

                if build.disable:
                    logging.info("...skipping version %s - %s" % (
                        build.version, build.get('disable', build.commit[1:])))
                else:
                    logging.info("...scanning version " + build.version)

                    # Prepare the source code...
                    root_dir, _ = common.prepare_source(vcs, app, build,
                                                        build_dir, srclib_dir,
                                                        extlib_dir, False)

                    # Do the scan...
                    count = scan_source(build_dir, root_dir, build)
                    if count > 0:
                        logging.warn('Scanner found %d problems in %s (%s)' % (
                            count, appid, build.vercode))
                        probcount += count

        except BuildException as be:
            logging.warn("Could not scan app %s due to BuildException: %s" % (
                appid, be))
            probcount += 1
        except VCSException as vcse:
            logging.warn("VCS error while scanning app %s: %s" % (appid, vcse))
            probcount += 1
        except Exception:
            logging.warn("Could not scan app %s due to unknown error: %s" % (
                appid, traceback.format_exc()))
            probcount += 1

    logging.info("Finished:")
    print("%d problems found" % probcount)
Example #13
def main():

    global options, config

    # Parse command line...
    parser = ArgumentParser(usage="%(prog)s [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]")
    common.setup_global_opts(parser)
    parser.add_argument("appid", nargs='*', help="app-id with optional versioncode in the form APPID[:VERCODE]")
    options = parser.parse_args()

    config = common.read_config(options)

    tmp_dir = 'tmp'
    if not os.path.isdir(tmp_dir):
        logging.info("Creating temporary directory")
        os.makedirs(tmp_dir)

    unsigned_dir = 'unsigned'
    if not os.path.isdir(unsigned_dir):
        logging.error("No unsigned directory - nothing to do")
        sys.exit(0)

    verified = 0
    notverified = 0

    vercodes = common.read_pkg_args(options.appid, True)

    for apkfile in sorted(glob.glob(os.path.join(unsigned_dir, '*.apk'))):

        apkfilename = os.path.basename(apkfile)
        appid, vercode = common.apknameinfo(apkfile)

        if vercodes and appid not in vercodes:
            continue
        if vercodes and vercodes[appid] and vercode not in vercodes[appid]:
            continue

        try:

            logging.info("Processing " + apkfilename)

            remoteapk = os.path.join(tmp_dir, apkfilename)
            if os.path.exists(remoteapk):
                os.remove(remoteapk)
            url = 'https://f-droid.org/repo/' + apkfilename
            logging.info("...retrieving " + url)
            net.download_file(url, dldir=tmp_dir)

            compare_result = common.compare_apks(
                os.path.join(unsigned_dir, apkfilename),
                remoteapk,
                tmp_dir)
            if compare_result:
                raise FDroidException(compare_result)

            logging.info("...successfully verified")
            verified += 1

        except FDroidException as e:
            logging.info("...NOT verified - {0}".format(e))
            notverified += 1

    logging.info("Finished")
    logging.info("{0} successfully verified".format(verified))
    logging.info("{0} NOT verified".format(notverified))
Example #14
def main():

    global options, config

    # Parse command line...
    parser = ArgumentParser()
    common.setup_global_opts(parser)
    parser.add_argument("-d", "--distinguished-name", default=None,
                        help="X.509 'Distinguished Name' used when generating keys")
    parser.add_argument("--keystore", default=None,
                        help="Path to the keystore for the repo signing key")
    parser.add_argument("--repo-keyalias", default=None,
                        help="Alias of the repo signing key in the keystore")
    parser.add_argument("--android-home", default=None,
                        help="Path to the Android SDK (sometimes set in ANDROID_HOME)")
    parser.add_argument("--no-prompt", action="store_true", default=False,
                        help="Do not prompt for Android SDK path, just fail")
    options = parser.parse_args()

    # find root install prefix
    tmp = os.path.dirname(sys.argv[0])
    if os.path.basename(tmp) == 'bin':
        prefix = None
        egg_link = os.path.join(tmp, '..', 'local/lib/python2.7/site-packages/fdroidserver.egg-link')
        if os.path.exists(egg_link):
            # installed from local git repo
            examplesdir = os.path.join(open(egg_link).readline().rstrip(), 'examples')
        else:
            prefix = os.path.dirname(os.path.dirname(__file__))  # use .egg layout
            if not prefix.endswith('.egg'):  # use UNIX layout
                prefix = os.path.dirname(tmp)
            examplesdir = prefix + '/share/doc/fdroidserver/examples'
    else:
        # we're running straight out of the git repo
        prefix = os.path.normpath(os.path.join(os.path.dirname(__file__), '..'))
        examplesdir = prefix + '/examples'

    aapt = None
    fdroiddir = os.getcwd()
    test_config = dict()
    common.fill_config_defaults(test_config)

    # track down where the Android SDK is, the default is to use the path set
    # in ANDROID_HOME if that exists, otherwise None
    if options.android_home is not None:
        test_config['sdk_path'] = options.android_home
    elif not common.test_sdk_exists(test_config):
        if os.path.isfile('/usr/bin/aapt'):
            # remove sdk_path and build_tools, they are not required
            test_config.pop('sdk_path', None)
            test_config.pop('build_tools', None)
            # make sure at least aapt is found, since this can't do anything without it
            test_config['aapt'] = common.find_sdk_tools_cmd('aapt')
        else:
            # if neither --android-home nor the default sdk_path exist, prompt the user
            default_sdk_path = '/opt/android-sdk'
            while not options.no_prompt:
                try:
                    s = raw_input('Enter the path to the Android SDK ('
                                  + default_sdk_path + ') here:\n> ')
                except KeyboardInterrupt:
                    print('')
                    sys.exit(1)
                if re.match(r'^\s*$', s) is not None:
                    test_config['sdk_path'] = default_sdk_path
                else:
                    test_config['sdk_path'] = s
                if common.test_sdk_exists(test_config):
                    break
    if not common.test_sdk_exists(test_config):
        sys.exit(3)

    if not os.path.exists('config.py'):
        # 'metadata' and 'tmp' are created in fdroid
        if not os.path.exists('repo'):
            os.mkdir('repo')
        shutil.copy(os.path.join(examplesdir, 'fdroid-icon.png'), fdroiddir)
        shutil.copyfile(os.path.join(examplesdir, 'config.py'), 'config.py')
        os.chmod('config.py', 0o0600)
        # If android_home is None, test_config['sdk_path'] will be used and
        # "$ANDROID_HOME" may be used if the env var is set up correctly.
        # If android_home is not None, the path given from the command line
        # will be directly written in the config.
        if 'sdk_path' in test_config:
            common.write_to_config(test_config, 'sdk_path', options.android_home)
    else:
        logging.warn('Looks like this is already an F-Droid repo, cowardly refusing to overwrite it...')
        logging.info('Try running `fdroid init` in an empty directory.')
        sys.exit()

    if 'aapt' not in test_config or not os.path.isfile(test_config['aapt']):
        # try to find a working aapt, in all the recent possible paths
        build_tools = os.path.join(test_config['sdk_path'], 'build-tools')
        aaptdirs = []
        aaptdirs.append(os.path.join(build_tools, test_config['build_tools']))
        aaptdirs.append(build_tools)
        for f in os.listdir(build_tools):
            if os.path.isdir(os.path.join(build_tools, f)):
                aaptdirs.append(os.path.join(build_tools, f))
        for d in sorted(aaptdirs, reverse=True):
            if os.path.isfile(os.path.join(d, 'aapt')):
                aapt = os.path.join(d, 'aapt')
                break
        if aapt and os.path.isfile(aapt):
            dirname = os.path.basename(os.path.dirname(aapt))
            if dirname == 'build-tools':
                # this is the old layout, before versioned build-tools
                test_config['build_tools'] = ''
            else:
                test_config['build_tools'] = dirname
            common.write_to_config(test_config, 'build_tools')
        common.ensure_build_tools_exists(test_config)

    # now that we have a local config.py, read configuration...
    config = common.read_config(options)

    # the NDK is optional and there may be multiple versions of it, so it's
    # left for the user to configure

    # find or generate the keystore for the repo signing key. First try the
    # path written in the default config.py.  Then check if the user has
    # specified a path from the command line, which will trump all others.
    # Otherwise, create ~/.local/share/fdroidserver and stick it in there.  If
    # keystore is set to NONE, that means that Java will look for keys in a
    # Hardware Security Module aka Smartcard.
    keystore = config['keystore']
    if options.keystore:
        if options.keystore == 'NONE':
            keystore = options.keystore
        else:
            keystore = os.path.abspath(options.keystore)
            if not os.path.exists(keystore):
                logging.info('"' + keystore
                             + '" does not exist, creating a new keystore there.')
    common.write_to_config(test_config, 'keystore', keystore)
    repo_keyalias = None
    if options.repo_keyalias:
        repo_keyalias = options.repo_keyalias
        common.write_to_config(test_config, 'repo_keyalias', repo_keyalias)
    if options.distinguished_name:
        keydname = options.distinguished_name
        common.write_to_config(test_config, 'keydname', keydname)
    if keystore == 'NONE':  # we're using a smartcard
        common.write_to_config(test_config, 'repo_keyalias', '1')  # seems to be the default
        disable_in_config('keypass', 'never used with smartcard')
        common.write_to_config(test_config, 'smartcardoptions',
                               ('-storetype PKCS11 -providerName SunPKCS11-OpenSC '
                                + '-providerClass sun.security.pkcs11.SunPKCS11 '
                                + '-providerArg opensc-fdroid.cfg'))
        # find opensc-pkcs11.so
        if not os.path.exists('opensc-fdroid.cfg'):
            if os.path.exists('/usr/lib/opensc-pkcs11.so'):
                opensc_so = '/usr/lib/opensc-pkcs11.so'
            elif os.path.exists('/usr/lib64/opensc-pkcs11.so'):
                opensc_so = '/usr/lib64/opensc-pkcs11.so'
            else:
                files = glob.glob('/usr/lib/' + os.uname()[4] + '-*-gnu/opensc-pkcs11.so')
                if len(files) > 0:
                    opensc_so = files[0]
                else:
                    opensc_so = '/usr/lib/opensc-pkcs11.so'
                    logging.warn('No OpenSC PKCS#11 module found, ' +
                                 'install OpenSC then edit "opensc-fdroid.cfg"!')
            with open(os.path.join(examplesdir, 'opensc-fdroid.cfg'), 'r') as f:
                opensc_fdroid = f.read()
            opensc_fdroid = re.sub('^library.*', 'library = ' + opensc_so, opensc_fdroid,
                                   flags=re.MULTILINE)
            with open('opensc-fdroid.cfg', 'w') as f:
                f.write(opensc_fdroid)
    elif not os.path.exists(keystore):
        password = common.genpassword()
        c = dict(test_config)
        c['keystorepass'] = password
        c['keypass'] = password
        c['repo_keyalias'] = socket.getfqdn()
        c['keydname'] = 'CN=' + c['repo_keyalias'] + ', OU=F-Droid'
        common.write_to_config(test_config, 'keystorepass', password)
        common.write_to_config(test_config, 'keypass', password)
        common.write_to_config(test_config, 'repo_keyalias', c['repo_keyalias'])
        common.write_to_config(test_config, 'keydname', c['keydname'])
        common.genkeystore(c)

    logging.info('Built repo based in "' + fdroiddir + '"')
    logging.info('with this config:')
    logging.info('  Android SDK:\t\t\t' + config['sdk_path'])
    if aapt:
        logging.info('  Android SDK Build Tools:\t' + os.path.dirname(aapt))
    logging.info('  Android NDK r10e (optional):\t$ANDROID_NDK')
    logging.info('  Keystore for signing key:\t' + keystore)
    if repo_keyalias is not None:
        logging.info('  Alias for key in store:\t' + repo_keyalias)
    logging.info('\nTo complete the setup, add your APKs to "' +
                 os.path.join(fdroiddir, 'repo') + '"' + '''
then run "fdroid update -c; fdroid update".  You might also want to edit
"config.py" to set the URL, repo name, and more.  You should also set up
a signing key (a temporary one might have been automatically generated).

For more info: https://f-droid.org/manual/fdroid.html#Simple-Binary-Repository
and https://f-droid.org/manual/fdroid.html#Signing
''')
Example #15
def main():

    global config, options

    # Parse command line...
    parser = ArgumentParser()
    common.setup_global_opts(parser)
    parser.add_argument("-u", "--url", default=None,
                        help="Project URL to import from.")
    parser.add_argument("-s", "--subdir", default=None,
                        help="Path to main android project subdirectory, if not in root.")
    parser.add_argument("--rev", default=None,
                        help="Allows a different revision (or git branch) to be specified for the initial import")
    options = parser.parse_args()

    config = common.read_config(options)

    apps = metadata.read_metadata()
    app = metadata.App()
    app.UpdateCheckMode = "Tags"

    root_dir = None
    build_dir = None

    if options.url:
        root_dir, build_dir = get_metadata_from_url(app, options.url)
    elif os.path.isdir('.git'):
        if options.url:
            app.WebSite = options.url
        root_dir = get_subdir(os.getcwd())
    else:
        logging.error("Specify project url.")
        sys.exit(1)

    # Extract some information...
    paths = common.manifest_paths(root_dir, [])
    if paths:

        version, vercode, package = common.parse_androidmanifests(paths, app)
        if not package:
            logging.error("Couldn't find package ID")
            sys.exit(1)
        if not version:
            logging.warn("Couldn't find latest version name")
        if not vercode:
            logging.warn("Couldn't find latest version code")
    else:
        spec = os.path.join(root_dir, 'buildozer.spec')
        if os.path.exists(spec):
            defaults = {'orientation': 'landscape', 'icon': '',
                        'permissions': '', 'android.api': "18"}
            bconfig = ConfigParser(defaults, allow_no_value=True)
            bconfig.read(spec)
            package = bconfig.get('app', 'package.domain') + '.' + bconfig.get('app', 'package.name')
            version = bconfig.get('app', 'version')
            vercode = None
        else:
            logging.error("No android or kivy project could be found. Specify --subdir?")
            sys.exit(1)

    # Make sure it's actually new...
    if package in apps:
        logging.error("Package " + package + " already exists")
        sys.exit(1)

    # Create a build line...
    build = metadata.Build()
    build.version = version or '?'
    build.vercode = vercode or '?'
    build.commit = '?'
    build.disable = 'Generated by import.py - check/set version fields and commit id'
    if options.subdir:
        build.subdir = options.subdir
    if os.path.exists(os.path.join(root_dir, 'jni')):
        build.buildjni = ['yes']

    app.builds.append(build)

    # Keep the repo directory to save bandwidth...
    if not os.path.exists('build'):
        os.mkdir('build')
    if build_dir is not None:
        shutil.move(build_dir, os.path.join('build', package))
    with open('build/.fdroidvcs-' + package, 'w') as f:
        f.write(app.RepoType + ' ' + app.Repo)

    metadatapath = os.path.join('metadata', package + '.txt')
    with open(metadatapath, 'w') as f:
        metadata.write_metadata('txt', f, app)
    logging.info("Wrote " + metadatapath)
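
For the buildozer branch, the package ID and version come straight out of the [app] section of buildozer.spec. A minimal, made-up spec and the corresponding lookups (Python 3 configparser here; the script above uses the Python 2 ConfigParser import):

from configparser import ConfigParser

spec_text = """
[app]
package.domain = org.example
package.name = myapp
version = 1.0
"""
bconfig = ConfigParser(allow_no_value=True)
bconfig.read_string(spec_text)  # the real script reads the spec file with bconfig.read()
package = bconfig.get('app', 'package.domain') + '.' + bconfig.get('app', 'package.name')
version = bconfig.get('app', 'version')
print(package, version)  # org.example.myapp 1.0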
Example #16
def main():
    global config, options

    # Parse command line...
    parser = ArgumentParser()
    common.setup_global_opts(parser)
    parser.add_argument("command", help="command to execute, either 'init' or 'update'")
    parser.add_argument("-i", "--identity-file", default=None,
                        help="Specify an identity file to provide to SSH for rsyncing")
    parser.add_argument("--local-copy-dir", default=None,
                        help="Specify a local folder to sync the repo to")
    parser.add_argument("--sync-from-local-copy-dir", action="store_true", default=False,
                        help="Before uploading to servers, sync from local copy dir")
    parser.add_argument("--no-checksum", action="store_true", default=False,
                        help="Don't use rsync checksums")
    options = parser.parse_args()

    config = common.read_config(options)

    if options.command != 'init' and options.command != 'update':
        logging.critical("The only commands currently supported are 'init' and 'update'")
        sys.exit(1)

    if config.get('nonstandardwebroot') is True:
        standardwebroot = False
    else:
        standardwebroot = True

    for serverwebroot in config.get('serverwebroot', []):
        # this supports both an ssh host:path and just a path
        s = serverwebroot.rstrip('/').split(':')
        if len(s) == 1:
            fdroiddir = s[0]
        elif len(s) == 2:
            host, fdroiddir = s
        else:
            logging.error('Malformed serverwebroot line: ' + serverwebroot)
            sys.exit(1)
        repobase = os.path.basename(fdroiddir)
        if standardwebroot and repobase != 'fdroid':
            logging.error('serverwebroot path does not end with "fdroid", '
                          + 'perhaps you meant one of these:\n\t'
                          + serverwebroot.rstrip('/') + '/fdroid\n\t'
                          + serverwebroot.rstrip('/').rstrip(repobase) + 'fdroid')
            sys.exit(1)

    if options.local_copy_dir is not None:
        local_copy_dir = options.local_copy_dir
    elif config.get('local_copy_dir'):
        local_copy_dir = config['local_copy_dir']
    else:
        local_copy_dir = None
    if local_copy_dir is not None:
        fdroiddir = local_copy_dir.rstrip('/')
        if os.path.exists(fdroiddir) and not os.path.isdir(fdroiddir):
            logging.error('local_copy_dir must be directory, not a file!')
            sys.exit(1)
        if not os.path.exists(os.path.dirname(fdroiddir)):
            logging.error('The root dir for local_copy_dir "'
                          + os.path.dirname(fdroiddir)
                          + '" does not exist!')
            sys.exit(1)
        if not os.path.isabs(fdroiddir):
            logging.error('local_copy_dir must be an absolute path!')
            sys.exit(1)
        repobase = os.path.basename(fdroiddir)
        if standardwebroot and repobase != 'fdroid':
            logging.error('local_copy_dir does not end with "fdroid", '
                          + 'perhaps you meant: ' + fdroiddir + '/fdroid')
            sys.exit(1)
        if local_copy_dir[-1] != '/':
            local_copy_dir += '/'
        local_copy_dir = local_copy_dir.replace('//', '/')
        if not os.path.exists(fdroiddir):
            os.mkdir(fdroiddir)

    if not config.get('awsbucket') \
            and not config.get('serverwebroot') \
            and local_copy_dir is None:
        logging.warn('No serverwebroot, local_copy_dir, or awsbucket set! '
                     + 'Edit your config.py to set at least one.')
        sys.exit(1)

    repo_sections = ['repo']
    if config['archive_older'] != 0:
        repo_sections.append('archive')
        if not os.path.exists('archive'):
            os.mkdir('archive')
    if config['per_app_repos']:
        repo_sections += common.get_per_app_repos()

    if options.command == 'init':
        ssh = paramiko.SSHClient()
        ssh.load_system_host_keys()
        for serverwebroot in config.get('serverwebroot', []):
            sshstr, remotepath = serverwebroot.rstrip('/').split(':')
            if sshstr.find('@') >= 0:
                username, hostname = sshstr.split('@')
            else:
                username = pwd.getpwuid(os.getuid())[0]  # get effective uid
                hostname = sshstr
            ssh.connect(hostname, username=username)
            sftp = ssh.open_sftp()
            if os.path.basename(remotepath) \
                    not in sftp.listdir(os.path.dirname(remotepath)):
                sftp.mkdir(remotepath, mode=0o755)
            for repo_section in repo_sections:
                repo_path = os.path.join(remotepath, repo_section)
                if os.path.basename(repo_path) \
                        not in sftp.listdir(remotepath):
                    sftp.mkdir(repo_path, mode=0o755)
            sftp.close()
            ssh.close()
    elif options.command == 'update':
        for repo_section in repo_sections:
            if local_copy_dir is not None:
                if config['sync_from_local_copy_dir'] and os.path.exists(repo_section):
                    sync_from_localcopy(repo_section, local_copy_dir)
                else:
                    update_localcopy(repo_section, local_copy_dir)
            for serverwebroot in config.get('serverwebroot', []):
                update_serverwebroot(serverwebroot, repo_section)
            if config.get('awsbucket'):
                update_awsbucket(repo_section)

    sys.exit(0)
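
The serverwebroot entries accept either a plain path or an ssh-style host:path, split on a single colon as above. That split logic in isolation, as a small sketch:

def split_webroot(serverwebroot):
    # Mirrors the host:path handling above; returns (host_or_None, path).
    s = serverwebroot.rstrip('/').split(':')
    if len(s) == 1:
        return None, s[0]
    if len(s) == 2:
        return s[0], s[1]
    raise ValueError('Malformed serverwebroot: ' + serverwebroot)

print(split_webroot('user@example.org:/var/www/fdroid'))  # ('user@example.org', '/var/www/fdroid')
print(split_webroot('/srv/fdroid'))                       # (None, '/srv/fdroid')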