Example No. 1
def main():

    parser = OptionParser(usage="Usage: %prog")
    parser.parse_args()
    common.read_config(None)

    metadata.read_metadata(xref=True)
Example No. 2
def main():

    parser = ArgumentParser(usage="%(prog)s")
    common.setup_global_opts(parser)
    parser.parse_args()
    common.read_config(None)

    metadata.read_metadata(xref=True)
Example No. 3
    def test_metadata_key_first_character_is_alnum(self):
        """metadata key starts with an ASCII letter or a number"""
        metadata = read_metadata(StringIO(dedent("""\
            wharrgarbl:this is metadata.
            """)))
        self.assertEqual(metadata, {'wharrgarbl': 'this is metadata.'})
        metadata = read_metadata(StringIO(dedent("""\
            2be or not 2 be:this is also metadata.
            """)))
        self.assertEqual(metadata, {'2beornot2be': 'this is also metadata.'})
        metadata = read_metadata(StringIO(dedent("""\
            -: this is not metadata.
            nometa: this
            """)))
        self.assertEqual(metadata, {})
        self.assertFalse(metadata.has_key('nometa'))
Example No. 4
    def test_multiline_values(self):
        """After the colon comes the metadata value, which can consist of
        pretty much any characters, including newlines; multiline values are
        processed correctly. If a value includes a colon, the continuation
        line must be indented to keep it from being treated as a new
        key-value pair."""
        source = StringIO(dedent("""\
            multiline-key:This is the text of the first line of the key,
            and this is the second line of the key.
                funny enough, there's also a third line with some indentation.
            simple-key:justoneword
            another multiline key:here we go
                with another line with a colon: yay

            nometa: this
        """))
        metadata = read_metadata(source)
        self.assertDictContainsSubset(
            {'multiline-key': dedent("""\
                This is the text of the first line of the key,
                and this is the second line of the key.
                    funny enough, there's also a third line with some indentation."""),
             'simple-key': 'justoneword',
             'anothermultilinekey': dedent("""\
                here we go
                    with another line with a colon: yay""")
             },
            metadata, repr(metadata))
Example No. 5
    def test_metadata_key_at_line_start(self):
        """metadata key must begin at the beginning of the line"""
        metadata = read_metadata(StringIO(dedent(""" this is not metadata.
            nometa: this
            """)))
        self.assertEqual(metadata, {})
        self.assertFalse(metadata.has_key('nometa'))
Example No. 6
def main():

    global config, options

    # Parse command line...
    parser = OptionParser()
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-p", "--package", default=None,
                      help="Process only the specified package")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    # Get all apps...
    apps = metadata.read_metadata(package=options.package, xref=False)

    if len(apps) == 0 and options.package:
        print "No such package"
        sys.exit(1)

    for app in apps:
        print "Writing " + app['id']
        metadata.write_metadata(os.path.join('metadata', app['id']) + '.txt', app)

    print "Finished."
Example No. 7
def main():

    global config, options

    # Parse command line...
    parser = OptionParser(usage="Usage: %prog [options] [APPID [APPID ...]]")
    parser.add_option("-v",
                      "--verbose",
                      action="store_true",
                      default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-q",
                      "--quiet",
                      action="store_true",
                      default=False,
                      help="Restrict output to warnings and errors")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    # Get all apps...
    allapps = metadata.read_metadata(xref=True)
    apps = common.read_app_args(args, allapps, False)

    for appid, app in apps.iteritems():
        logging.info("Writing " + appid)
        metadata.write_metadata(os.path.join('metadata', appid) + '.txt', app)

    logging.info("Finished.")
Example No. 8
def process_request(request): 
    metadata = read_metadata(METADATA)
    db = setup_database(metadata)
    table = metadata["table"]
    button_data = [{"name": r.name, "selected": False, "importance": -1} for r in metadata["relations"].values()]
    design = None
    if request.method == "POST":
        selection_data = json.loads(request.form["relations"])
        apt_input = sorted(selection_data, key=lambda x: int(x["importance"]))
        apt_input = [metadata["relations"][s["name"]] for s in apt_input]
        query, query_params = construct_test_query(apt_input, table) 
        if len(apt_input) == 1:
            r = apt_input[0]
            labels = ["APTREMAKEID"] + [r.determinant.name, r.dependent.name]
        else:    
            r = apt_input[0]
            labels = ["APTREMAKEID", r.determinant.name] + [r.dependent.name for r in apt_input]
        view = View(apt_input, db, query, query_params, labels)
        try:
            design = generate_presentation(view, limit=1).next()
        except StopIteration: # nothing generated
            pass
        for s in selection_data:
            metadata["relations"][s["name"]].selected = True
            metadata["relations"][s["name"]].importance = s["importance"]
        button_data = [{"name": r.name, "selected": r.selected, "importance": r.importance} for r in metadata["relations"].values()]
    button_data.sort(key=lambda x: len(x["name"]))
    return design, button_data
Example No. 9
    def __init__(self, source_path, target, output_type):
        self.source_path = source_path
        self.target = target
        self.type = output_type

        self.source = open(self.source_path, 'r')
        self.transcludebase = os.path.dirname(source_path)

        self.metadata = read_metadata(self.source)
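
Because read_metadata consumes only the metadata header, the open file handle is left positioned at the start of the document body (the seek tests further below pin down this behaviour). A minimal usage sketch; the enclosing class is not shown above, so the name Document and the paths here are assumptions:

doc = Document('notes/example.txt', 'out/example.html', 'html')
print(doc.metadata)       # key/value pairs parsed from the top of the source file
body = doc.source.read()  # the stream is left at the start of the body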
Example No. 10
    def test_seek_to_zero_if_first_line_has_no_key(self):
        source = StringIO(dedent("""\
            this is not metadata.

            nometa: this
            """))
        metadata = read_metadata(source)
        self.assertEqual(metadata, {})
        self.assertFalse(metadata.has_key('nometa'))
        self.assertEqual('this is not metadata.\n', source.readline())
Example No. 11
    def test_no_blank_line_before(self):
        """metadata must begin at the very top of the document, no blank lines can precede it."""

        source = StringIO(dedent("""\

            nometa: this
            """))
        metadata = read_metadata(source)
        self.assertEqual(metadata, {})
        self.assertFalse(metadata.has_key('nometa'))
        self.assertEqual('\n', source.readline())
Example No. 12
def main():

    global config, options

    anywarns = False

    # Parse command line...
    parser = ArgumentParser(usage="%(prog)s [options] [APPID [APPID ...]]")
    common.setup_global_opts(parser)
    parser.add_argument("-f", "--format", action="store_true", default=False,
                        help="Also warn about formatting issues, like rewritemeta -l")
    parser.add_argument("appid", nargs='*', help="app-id in the form APPID")
    options = parser.parse_args()

    config = common.read_config(options)

    # Get all apps...
    allapps = metadata.read_metadata(xref=True)
    apps = common.read_app_args(options.appid, allapps, False)

    for appid, app in apps.iteritems():
        if app.Disabled:
            continue

        warns = []

        for check_func in [
                check_regexes,
                check_ucm_tags,
                check_char_limits,
                check_old_links,
                check_checkupdates_ran,
                check_useless_fields,
                check_empty_fields,
                check_categories,
                check_duplicates,
                check_mediawiki_links,
                check_bulleted_lists,
                check_builds,
                ]:
            warns += check_func(app)

        if options.format:
            if not rewritemeta.proper_format(app):
                warns.append("Run rewritemeta to fix formatting")

        if warns:
            anywarns = True
            for warn in warns:
                print("%s: %s" % (appid, warn))

    if anywarns:
        sys.exit(1)
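
Each check_* function in the list above evidently takes an app and returns a list of warning strings. None of them are shown here, so the following is only a hypothetical sketch of that shape, assuming the app object supports dict-style access:

def check_old_links(app):
    # Hypothetical sketch: flag fields that still point at a defunct host.
    warns = []
    for f in ['Web Site', 'Source Code', 'Issue Tracker']:
        v = app.get(f, '')
        if v and 'code.google.com' in v:
            warns.append("%s links to the long-gone Google Code" % f)
    return warns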
Example No. 13
    def test_blank_line_ends_metadata(self):
        """after the metadata is finished, a blank line triggers the beginning of the rest of the document."""

        source = StringIO(dedent("""\
            foo:bar:baz

            nometa: this
        """))
        metadata = read_metadata(source)
        self.assertDictContainsSubset(
            {'foo': 'bar:baz'}, metadata, repr(metadata))
        self.assertFalse(metadata.has_key('nometa'))
        self.assertEqual('nometa: this\n', source.readline())
Example No. 14
    def test_followed_by_dots(self):
        """The line after the metadata can also be ..."""

        source = StringIO(dedent("""\
            foo:bar:baz
            ...
            nometa: this
            """))
        metadata = read_metadata(source)
        self.assertDictContainsSubset(
            {'foo': 'bar:baz'}, metadata, repr(metadata))
        self.assertFalse(metadata.has_key('nometa'))
        self.assertEqual('nometa: this\n', source.readline())
Example No. 15
    def test_prefix_dashes(self):
        """There can optionally be a --- on the line before the metadata."""

        source = StringIO(dedent("""\
            ---
            foo:bar:baz

            nometa: this
        """))
        metadata = read_metadata(source)
        self.assertDictContainsSubset(
            {'foo': 'bar:baz'}, metadata, repr(metadata))
        self.assertFalse(metadata.has_key('nometa'))
        self.assertEqual('nometa: this\n', source.readline())
Example No. 16
    def test_keys_are_lowercased_and_stripped_of_spaces(self):
        """Metadata keys are case insensitive and stripped of all spaces during processing."""

        source = StringIO(dedent("""\
            this is one key:first
            tHis IsAno th   ER:second

            nometa: this
        """))
        metadata = read_metadata(source)
        self.assertDictContainsSubset(
            {'thisisonekey': 'first',
             'thisisanother': 'second'},
            metadata, repr(metadata))
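
Taken together, the tests above pin down the whole metadata grammar: keys start at column 0 with an ASCII letter or digit and are lowercased and stripped of spaces; values may continue over indented or colon-free lines; an optional --- may precede the block; and a blank line or ... ends it, while a document whose first line carries no valid key has no metadata at all and the stream is rewound. The real read_metadata is not shown in these examples, but a minimal sketch that satisfies the tests could look like this:

import re

KEY_RE = re.compile(r'^[A-Za-z0-9][^:]*:')

def read_metadata(source):
    # A sketch only: parse "key:value" headers from the top of a stream,
    # leaving the stream positioned at the start of the document body.
    metadata = {}
    key = None
    line = source.readline()
    if line.rstrip('\n') == '---':        # optional opening marker
        line = source.readline()
    while line:
        text = line.rstrip('\n')
        if text in ('', '...'):           # terminator: consume it and stop
            if metadata:
                return metadata
            source.seek(0)                # leading blank line: no metadata
            return {}
        if not line[0].isspace() and KEY_RE.match(text):
            key, _, value = text.partition(':')
            metadata[key.lower().replace(' ', '')] = value
        elif key is not None:             # continuation of the previous value
            metadata[key] += '\n' + text
        else:                             # first line has no valid key
            source.seek(0)
            return {}
        line = source.readline()
    return metadata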
Example No. 17
def main():

    global config, options

    # Parse command line...
    parser = ArgumentParser(usage="%(prog)s [options] [APPID [APPID ...]]")
    common.setup_global_opts(parser)
    parser.add_argument("-l", "--list", action="store_true", default=False,
                        help="List files that would be reformatted")
    parser.add_argument("-t", "--to", default=None,
                        help="Rewrite to a specific format")
    parser.add_argument("appid", nargs='*', help="app-id in the form APPID")
    options = parser.parse_args()

    config = common.read_config(options)

    # Get all apps...
    allapps = metadata.read_metadata(xref=True)
    apps = common.read_app_args(options.appid, allapps, False)

    if options.list and options.to is not None:
        parser.error("Cannot use --list and --to at the same time")

    supported = ['txt', 'yaml']

    if options.to is not None and options.to not in supported:
        parser.error("Must give a valid format to --to")

    for appid, app in apps.iteritems():
        base, ext = common.get_extension(app.metadatapath)
        if not options.to and ext not in supported:
            logging.info("Ignoring %s file at '%s'" % (ext, app.metadatapath))
            continue

        to_ext = ext
        if options.to is not None:
            to_ext = options.to

        if options.list:
            if not proper_format(app):
                print app.metadatapath
            continue

        with open(base + '.' + to_ext, 'w') as f:
            metadata.write_metadata(to_ext, f, app)

        if ext != to_ext:
            os.remove(app.metadatapath)

    logging.debug("Finished.")
Example No. 18
def create_dataset_dirs(base_dir, current_dataset_id):
    """
    Function to facilitate creation of sub-directories for datasets.
    This function is called when a dataset being processed encounters a restructured dataset with the same date
    and location. They may be separate collections or duplicates. If they are separate collections, we want to
    make sure to save data from each; otherwise, raise an error.

    Parameters:
        base_dir - path to directory containing the already existing dataset that conflicts with the current dataset.
        current_dataset_id - String. The dataset id of the dataset currently being processed.

    Returns:
        new_dir - path to the new directory the dataset currently being processed will reside within.
    """

    # Read metadata from existing dataset
    metadata = meta.read_metadata(os.path.join(base_dir, 'Metadata.csv'))
    # Get the dataset id from the existing dataset
    existing_dataset_id = metadata['Dataset ID']

    # Ensure current dataset id != existing dataset id
    if existing_dataset_id == current_dataset_id:
        # For now, we will create a copy for further investigation.

        # Warn that this is happening.
        warn_str = 'DUPLICATE DATASETS DETECTED IN {0}. DATASET ID {1}'.format(base_dir, existing_dataset_id)
        warnings.warn(warn_str)
        logging.warning(warn_str)

        # Modify the dataset ids of the two datasets so they are different
        #   (existing will have _1 appended to end and current will have _2 appended to end).
        existing_dataset_id += '_1'
        current_dataset_id += '_2'

    # Move the current contents of directory to the new, dataset id based directory.
    new_existing_dir = os.path.join(base_dir, existing_dataset_id.replace(':', ''))
    dirlist = os.listdir(base_dir)
    os.makedirs(new_existing_dir)
    for element in dirlist:
        shutil.move(os.path.join(base_dir, element),
                    os.path.join(new_existing_dir, element))

    # Define a new location directory using the current dataset id.
    #   For now, also replace the :'s between timestamp elements with nothing.
    new_dir = os.path.join(base_dir, current_dataset_id.replace(':', ''))
    # Make the directory.
    os.makedirs(new_dir)

    return new_dir
Example No. 19
def main():
    options = parse_arguments()
    output_folder = os.path.expanduser(options.output_folder)
    if not os.path.exists(output_folder):
        raise OSError("Output folder not found: %s" % output_folder)

    if options.is_email:
        gd_client = login(options.source)
        albums = download_metadata(gd_client)
    else:
        albums = read_metadata(options.source)

    if options.command == "download":
        download_photos(albums, output_folder)
    elif options.command == "check":
        reports = check_photos(albums, output_folder)
        generate_html(reports, REPORT_FILEPATH)
        webbrowser.open_new_tab(url_from_path(REPORT_FILEPATH))
    elif options.command == "metadata":
        pass
Example No. 20
def main():

    global config, options

    # Parse command line...
    parser = OptionParser(usage="Usage: %prog [options] [APPID [APPID ...]]")
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-q", "--quiet", action="store_true", default=False,
                      help="Restrict output to warnings and errors")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    # Get all apps...
    allapps = metadata.read_metadata(xref=True)
    apps = common.read_app_args(args, allapps, False)

    for appid, app in apps.iteritems():
        logging.info("Writing " + appid)
        metadata.write_metadata(os.path.join('metadata', appid) + '.txt', app)

    logging.info("Finished.")
Example No. 21
def main():

    global config, options

    # Parse command line...
    parser = OptionParser(usage="Usage: %prog [options] [APPID [APPID ...]]")
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-q", "--quiet", action="store_true", default=False,
                      help="Restrict output to warnings and errors")
    parser.add_option("--auto", action="store_true", default=False,
                      help="Process auto-updates")
    parser.add_option("--autoonly", action="store_true", default=False,
                      help="Only process apps with auto-updates")
    parser.add_option("--commit", action="store_true", default=False,
                      help="Commit changes")
    parser.add_option("--gplay", action="store_true", default=False,
                      help="Only print differences with the Play Store")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    # Get all apps...
    allapps = metadata.read_metadata()

    apps = common.read_app_args(args, allapps, False)

    if options.gplay:
        for app in apps:
            version, reason = check_gplay(app)
            if version is None:
                if reason == '404':
                    logging.info("{0} is not in the Play Store".format(common.getappname(app)))
                else:
                    logging.info("{0} encountered a problem: {1}".format(common.getappname(app), reason))
            if version is not None:
                stored = app['Current Version']
                if not stored:
                    logging.info("{0} has no Current Version but has version {1} on the Play Store"
                                 .format(common.getappname(app), version))
                elif LooseVersion(stored) < LooseVersion(version):
                    logging.info("{0} has version {1} on the Play Store, which is bigger than {2}"
                                 .format(common.getappname(app), version, stored))
                else:
                    if stored != version:
                        logging.info("{0} has version {1} on the Play Store, which differs from {2}"
                                     .format(common.getappname(app), version, stored))
                    else:
                        logging.info("{0} has the same version {1} on the Play Store"
                                     .format(common.getappname(app), version))
        return

    for appid, app in apps.iteritems():

        if options.autoonly and app['Auto Update Mode'] in ('None', 'Static'):
            logging.debug("Nothing to do for {0}...".format(appid))
            continue

        logging.info("Processing " + appid + '...')

        checkupdates_app(app)

    logging.info("Finished.")
Example No. 22
def main():

    global config, options

    # Parse command line...
    parser = OptionParser()
    parser.add_option("-u", "--url", default=None,
                      help="Project URL to import from.")
    parser.add_option("-s", "--subdir", default=None,
                      help="Path to main android project subdirectory, if not in root.")
    parser.add_option("-r", "--repo", default=None,
                      help="Allows a different repo to be specified for a multi-repo google code project")
    parser.add_option("--rev", default=None,
                      help="Allows a different revision (or git branch) to be specified for the initial import")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    if not options.url:
        print "Specify project url."
        sys.exit(1)
    url = options.url

    tmp_dir = 'tmp'
    if not os.path.isdir(tmp_dir):
        print "Creating temporary directory"
        os.makedirs(tmp_dir)

    # Get all apps...
    apps = metadata.read_metadata()

    # Figure out what kind of project it is...
    projecttype = None
    issuetracker = None
    license = None
    website = url #by default, we might override it
    if url.startswith('git://'):
        projecttype = 'git'
        repo = url
        repotype = 'git'
        sourcecode = ""
        website = ""
    elif url.startswith('https://github.com'):
        if url.endswith('/'):
            url = url[:-1]
        if url.endswith('.git'):
            print "A github URL should point to the project, not the git repo"
            sys.exit(1)
        projecttype = 'github'
        repo = url + '.git'
        repotype = 'git'
        sourcecode = url
        issuetracker = url + '/issues'
    elif url.startswith('https://gitorious.org/'):
        projecttype = 'gitorious'
        repo = 'https://git.gitorious.org/' + url[22:] + '.git'
        repotype = 'git'
        sourcecode = url
    elif url.startswith('https://bitbucket.org/'):
        if url.endswith('/'):
            url = url[:-1]
        projecttype = 'bitbucket'
        sourcecode = url + '/src'
        issuetracker = url + '/issues'
        # Figure out the repo type and address...
        repotype, repo = getrepofrompage(sourcecode)
        if not repotype:
            print "Unable to determine vcs type. " + repo
            sys.exit(1)
    elif url.startswith('http://code.google.com/p/'):
        if not url.endswith('/'):
            url += '/'
        projecttype = 'googlecode'
        sourcecode = url + 'source/checkout'
        if options.repo:
            sourcecode += "?repo=" + options.repo
        issuetracker = url + 'issues/list'

        # Figure out the repo type and address...
        repotype, repo = getrepofrompage(sourcecode)
        if not repotype:
            print "Unable to determine vcs type. " + repo
            sys.exit(1)

        # Figure out the license...
        req = urllib.urlopen(url)
        if req.getcode() != 200:
            print 'Unable to find project page at ' + sourcecode + ' - return code ' + str(req.getcode())
            sys.exit(1)
        page = req.read()
        index = page.find('Code license')
        if index == -1:
            print "Couldn't find license data"
            sys.exit(1)
        ltext = page[index:]
        lprefix = 'rel="nofollow">'
        index = ltext.find(lprefix)
        if index == -1:
            print "Couldn't find license text"
            sys.exit(1)
        ltext = ltext[index + len(lprefix):]
        index = ltext.find('<')
        if index == -1:
            print "License text not formatted as expected"
            sys.exit(1)
        ltext = ltext[:index]
        if ltext == 'GNU GPL v3':
            license = 'GPLv3'
        elif ltext == 'GNU GPL v2':
            license = 'GPLv2'
        elif ltext == 'Apache License 2.0':
            license = 'Apache2'
        elif ltext == 'MIT License':
            license = 'MIT'
        elif ltext == 'GNU Lesser GPL':
            license = 'LGPL'
        elif ltext == 'Mozilla Public License 1.1':
            license = 'MPL'
        elif ltext == 'New BSD License':
            license = 'NewBSD'
        else:
            print "License " + ltext + " is not recognised"
            sys.exit(1)

    if not projecttype:
        print "Unable to determine the project type."
        print "The URL you supplied was not in one of the supported formats. Please consult"
        print "the manual for a list of supported formats, and supply one of those."
        sys.exit(1)

    # Get a copy of the source so we can extract some info...
    print 'Getting source from ' + repotype + ' repo at ' + repo
    src_dir = os.path.join(tmp_dir, 'importer')
    if os.path.exists(src_dir):
        shutil.rmtree(src_dir)
    vcs = common.getvcs(repotype, repo, src_dir)
    vcs.gotorevision(options.rev)
    if options.subdir:
        root_dir = os.path.join(src_dir, options.subdir)
    else:
        root_dir = src_dir

    # Extract some information...
    paths = common.manifest_paths(root_dir, None)
    if paths:

        version, vercode, package = common.parse_androidmanifests(paths)
        if not package:
            print "Couldn't find package ID"
            sys.exit(1)
        if not version:
            print "WARNING: Couldn't find latest version name"
        if not vercode:
            print "WARNING: Couldn't find latest version code"
    else:
        spec = os.path.join(root_dir, 'buildozer.spec')
        if os.path.exists(spec):
            defaults = {'orientation': 'landscape', 'icon': '',
                        'permissions': '', 'android.api': "18"}
            bconfig = ConfigParser(defaults, allow_no_value=True)
            bconfig.read(spec)
            package = bconfig.get('app', 'package.domain') + '.' + bconfig.get('app', 'package.name')
            version = bconfig.get('app', 'version')
            vercode = None
        else:
            print "No android or kivy project could be found. Specify --subdir?"
            sys.exit(1)

    # Make sure it's actually new...
    for app in apps:
        if app['id'] == package:
            print "Package " + package + " already exists"
            sys.exit(1)

    # Construct the metadata...
    app = metadata.parse_metadata(None)
    app['id'] = package
    app['Web Site'] = website
    app['Source Code'] = sourcecode
    if issuetracker:
        app['Issue Tracker'] = issuetracker
    if license:
        app['License'] = license
    app['Repo Type'] = repotype
    app['Repo'] = repo
    app['Update Check Mode'] = "Tags"

    # Create a build line...
    build = {}
    build['version'] = version if version else '?'
    build['vercode'] = vercode if vercode else '?'
    build['commit'] = '?'
    build['disable'] = 'Generated by import.py - check/set version fields and commit id'
    if options.subdir:
        build['subdir'] = options.subdir
    if os.path.exists(os.path.join(root_dir, 'jni')):
        build['buildjni'] = 'yes'
    app['builds'].append(build)

    # Keep the repo directory to save bandwidth...
    if not os.path.exists('build'):
        os.mkdir('build')
    shutil.move(src_dir, os.path.join('build', package))
    with open('build/.fdroidvcs-' + package, 'w') as f:
        f.write(repotype + ' ' + repo)

    metafile = os.path.join('metadata', package + '.txt')
    metadata.write_metadata(metafile, app)
    print "Wrote " + metafile
Example No. 23
def read_test_metadata():
    car_metadata = "/Users/salspaugh/classes/visualization/project/aptremake/specs/json/cars_coded.spec"
    return read_metadata(car_metadata)
Example No. 24
def main():

    global config, options

    # Parse command line...
    parser = OptionParser()
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-q", "--quiet", action="store_true", default=False,
                      help="Restrict output to warnings and errors")
    parser.add_option("-u", "--url", default=None,
                      help="Project URL to import from.")
    parser.add_option("-s", "--subdir", default=None,
                      help="Path to main android project subdirectory, if not in root.")
    parser.add_option("-r", "--repo", default=None,
                      help="Allows a different repo to be specified for a multi-repo google code project")
    parser.add_option("--rev", default=None,
                      help="Allows a different revision (or git branch) to be specified for the initial import")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    if not options.url:
        logging.error("Specify project url.")
        sys.exit(1)
    url = options.url

    tmp_dir = 'tmp'
    if not os.path.isdir(tmp_dir):
        logging.info("Creating temporary directory")
        os.makedirs(tmp_dir)

    # Get all apps...
    apps = metadata.read_metadata()

    # Figure out what kind of project it is...
    projecttype = None
    issuetracker = None
    license = None
    website = url  # by default, we might override it
    if url.startswith('git://'):
        projecttype = 'git'
        repo = url
        repotype = 'git'
        sourcecode = ""
        website = ""
    elif url.startswith('https://github.com'):
        projecttype = 'github'
        repo = url
        repotype = 'git'
        sourcecode = url
        issuetracker = url + '/issues'
    elif url.startswith('https://gitlab.com/'):
        projecttype = 'gitlab'
        repo = url
        repotype = 'git'
        sourcecode = url
        issuetracker = url + '/issues'
    elif url.startswith('https://gitorious.org/'):
        projecttype = 'gitorious'
        repo = 'https://git.gitorious.org/' + url[22:] + '.git'
        repotype = 'git'
        sourcecode = url
    elif url.startswith('https://bitbucket.org/'):
        if url.endswith('/'):
            url = url[:-1]
        projecttype = 'bitbucket'
        sourcecode = url + '/src'
        issuetracker = url + '/issues'
        # Figure out the repo type and address...
        repotype, repo = getrepofrompage(sourcecode)
        if not repotype:
            logging.error("Unable to determine vcs type. " + repo)
            sys.exit(1)
    elif (url.startswith('http://code.google.com/p/') or
            url.startswith('https://code.google.com/p/')):
        if not url.endswith('/'):
            url += '/'
        projecttype = 'googlecode'
        sourcecode = url + 'source/checkout'
        if options.repo:
            sourcecode += "?repo=" + options.repo
        issuetracker = url + 'issues/list'

        # Figure out the repo type and address...
        repotype, repo = getrepofrompage(sourcecode)
        if not repotype:
            logging.error("Unable to determine vcs type. " + repo)
            sys.exit(1)

        # Figure out the license...
        req = urllib.urlopen(url)
        if req.getcode() != 200:
            logging.error('Unable to find project page at ' + sourcecode + ' - return code ' + str(req.getcode()))
            sys.exit(1)
        page = req.read()
        index = page.find('Code license')
        if index == -1:
            logging.error("Couldn't find license data")
            sys.exit(1)
        ltext = page[index:]
        lprefix = 'rel="nofollow">'
        index = ltext.find(lprefix)
        if index == -1:
            logging.error("Couldn't find license text")
            sys.exit(1)
        ltext = ltext[index + len(lprefix):]
        index = ltext.find('<')
        if index == -1:
            logging.error("License text not formatted as expected")
            sys.exit(1)
        ltext = ltext[:index]
        if ltext == 'GNU GPL v3':
            license = 'GPLv3'
        elif ltext == 'GNU GPL v2':
            license = 'GPLv2'
        elif ltext == 'Apache License 2.0':
            license = 'Apache2'
        elif ltext == 'MIT License':
            license = 'MIT'
        elif ltext == 'GNU Lesser GPL':
            license = 'LGPL'
        elif ltext == 'Mozilla Public License 1.1':
            license = 'MPL'
        elif ltext == 'New BSD License':
            license = 'NewBSD'
        else:
            logging.error("License " + ltext + " is not recognised")
            sys.exit(1)

    if not projecttype:
        logging.error("Unable to determine the project type.")
        logging.error("The URL you supplied was not in one of the supported formats. Please consult")
        logging.error("the manual for a list of supported formats, and supply one of those.")
        sys.exit(1)

    # Get a copy of the source so we can extract some info...
    logging.info('Getting source from ' + repotype + ' repo at ' + repo)
    src_dir = os.path.join(tmp_dir, 'importer')
    if os.path.exists(src_dir):
        shutil.rmtree(src_dir)
    vcs = common.getvcs(repotype, repo, src_dir)
    vcs.gotorevision(options.rev)
    if options.subdir:
        root_dir = os.path.join(src_dir, options.subdir)
    else:
        root_dir = src_dir

    # Extract some information...
    paths = common.manifest_paths(root_dir, None)
    if paths:

        version, vercode, package = common.parse_androidmanifests(paths)
        if not package:
            logging.error("Couldn't find package ID")
            sys.exit(1)
        if not version:
            logging.warn("Couldn't find latest version name")
        if not vercode:
            logging.warn("Couldn't find latest version code")
    else:
        spec = os.path.join(root_dir, 'buildozer.spec')
        if os.path.exists(spec):
            defaults = {'orientation': 'landscape', 'icon': '',
                        'permissions': '', 'android.api': "18"}
            bconfig = ConfigParser(defaults, allow_no_value=True)
            bconfig.read(spec)
            package = bconfig.get('app', 'package.domain') + '.' + bconfig.get('app', 'package.name')
            version = bconfig.get('app', 'version')
            vercode = None
        else:
            logging.error("No android or kivy project could be found. Specify --subdir?")
            sys.exit(1)

    # Make sure it's actually new...
    if package in apps:
        logging.error("Package " + package + " already exists")
        sys.exit(1)

    # Construct the metadata...
    app = metadata.parse_metadata(None)[1]
    app['Web Site'] = website
    app['Source Code'] = sourcecode
    if issuetracker:
        app['Issue Tracker'] = issuetracker
    if license:
        app['License'] = license
    app['Repo Type'] = repotype
    app['Repo'] = repo
    app['Update Check Mode'] = "Tags"

    # Create a build line...
    build = {}
    build['version'] = version or '?'
    build['vercode'] = vercode or '?'
    build['commit'] = '?'
    build['disable'] = 'Generated by import.py - check/set version fields and commit id'
    if options.subdir:
        build['subdir'] = options.subdir
    if os.path.exists(os.path.join(root_dir, 'jni')):
        build['buildjni'] = ['yes']

    for flag, value in metadata.flag_defaults.iteritems():
        if flag in build:
            continue
        build[flag] = value

    app['builds'].append(build)

    # Keep the repo directory to save bandwidth...
    if not os.path.exists('build'):
        os.mkdir('build')
    shutil.move(src_dir, os.path.join('build', package))
    with open('build/.fdroidvcs-' + package, 'w') as f:
        f.write(repotype + ' ' + repo)

    metafile = os.path.join('metadata', package + '.txt')
    metadata.write_metadata(metafile, app)
    logging.info("Wrote " + metafile)
Example No. 25
def main():

    global config, options, curid, count
    curid = None

    count = Counter()

    def warn(message):
        global curid, count
        if curid:
            print "%s:" % curid
            curid = None
            count['app'] += 1
        print '    %s' % message
        count['warn'] += 1

    def pwarn(message):
        if options.pedantic:
            warn(message)

    # Parse command line...
    parser = OptionParser(usage="Usage: %prog [options] [APPID [APPID ...]]")
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-q", "--quiet", action="store_true", default=False,
                      help="Restrict output to warnings and errors")
    parser.add_option("-p", "--pedantic", action="store_true", default=False,
                      help="Show pedantic warnings that might give false positives")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    # Get all apps...
    allapps = metadata.read_metadata(xref=False)
    apps = common.read_app_args(args, allapps, False)

    for appid, app in apps.iteritems():
        if app['Disabled']:
            continue

        curid = appid
        count['app_total'] += 1

        curbuild = None
        for build in app['builds']:
            if not curbuild or int(build['vercode']) > int(curbuild['vercode']):
                curbuild = build

        # Potentially incorrect UCM
        if (curbuild and curbuild['commit']
                and app['Update Check Mode'] == 'RepoManifest' and
                any(s in curbuild['commit'] for s in '.,_-/')):
            pwarn("Last used commit '%s' looks like a tag, but Update Check Mode is '%s'" % (
                curbuild['commit'], app['Update Check Mode']))

        # Dangerous auto updates
        if curbuild and app['Auto Update Mode'] != 'None':
            for flag in ['target', 'srclibs', 'scanignore']:
                if curbuild[flag]:
                    pwarn("Auto Update Mode is enabled but '%s' is manually set at '%s'" % (flag, curbuild[flag]))

        # Summary size limit
        summ_chars = len(app['Summary'])
        if summ_chars > config['char_limits']['Summary']:
            warn("Summary of length %s is over the %i char limit" % (
                summ_chars, config['char_limits']['Summary']))

        # Redundant info
        if app['Web Site'] and app['Source Code']:
            if app['Web Site'].lower() == app['Source Code'].lower():
                warn("Website '%s' is just the app's source code link" % app['Web Site'])
                app['Web Site'] = ''

        name = app['Name'] or app['Auto Name']
        if app['Summary'] and name:
            if app['Summary'].lower() == name.lower():
                warn("Summary '%s' is just the app's name" % app['Summary'])

        if app['Summary'] and app['Description'] and len(app['Description']) == 1:
            if app['Summary'].lower() == app['Description'][0].lower():
                warn("Description '%s' is just the app's summary" % app['Summary'])

        # Description size limit
        desc_chars = sum(len(l) for l in app['Description'])
        if desc_chars > config['char_limits']['Description']:
            warn("Description of length %s is over the %i char limit" % (
                desc_chars, config['char_limits']['Description']))

        # Regex checks in all kinds of fields
        for f in regex_warnings:
            for m, r in regex_warnings[f]:
                t = metadata.metafieldtype(f)
                if t == 'string':
                    if m.match(app[f]):
                        warn("%s '%s': %s" % (f, app[f], r))
                elif t == 'multiline':
                    for l in app[f]:
                        if m.match(l):
                            warn("%s at line '%s': %s" % (f, l, r))

        # Regex pedantic checks in all kinds of fields
        if options.pedantic:
            for f in regex_pedantic:
                for m, r in regex_pedantic[f]:
                    if m.match(app[f]):
                        warn("%s '%s': %s" % (f, app[f], r))

        # Build warnings
        for build in app['builds']:
            if build['disable']:
                continue
            for s in ['master', 'origin', 'HEAD', 'default', 'trunk']:
                if build['commit'] and build['commit'].startswith(s):
                    warn("Branch '%s' used as commit in build '%s'" % (
                        s, build['version']))
                for srclib in build['srclibs']:
                    ref = srclib.split('@')[1].split('/')[0]
                    if ref.startswith(s):
                        warn("Branch '%s' used as commit in srclib '%s'" % (
                            s, srclib))
            for s in ['git clone', 'git svn clone', 'svn checkout', 'svn co', 'hg clone']:
                for flag in ['init', 'prebuild', 'build']:
                    if not build[flag]:
                        continue
                    if s in build[flag]:
                        # TODO: This should not be pedantic!
                        pwarn("'%s' used in %s '%s'" % (s, flag, build[flag]))

        if not curid:
            print

    logging.info("Found a total of %i warnings in %i apps out of %i total." % (
        count['warn'], count['app'], count['app_total']))
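
Neither regex_warnings nor regex_pedantic is defined in this example; from the loop above they must map field names to lists of (compiled pattern, reason) pairs, with metadata.metafieldtype deciding whether a field is matched as one string or line by line. A sketch of the assumed shape, with made-up entries:

import re

regex_warnings = {
    'Web Site': [
        (re.compile(r'.*\.git$'),
         "URL appears to point at a git repo rather than a web site"),
    ],
    'Description': [
        (re.compile(r'.*\s$'),
         "Unnecessary trailing space"),
    ],
}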
Example No. 26
def main():

    global config, options

    # Parse command line...
    parser = OptionParser()
    parser.add_option("-c", "--createmeta", action="store_true", default=False,
                      help="Create skeleton metadata files that are missing")
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-q", "--quiet", action="store_true", default=False,
                      help="No output, except for warnings and errors")
    parser.add_option("-b", "--buildreport", action="store_true", default=False,
                      help="Report on build data status")
    parser.add_option("-i", "--interactive", default=False, action="store_true",
                      help="Interactively ask about things that need updating.")
    parser.add_option("-I", "--icons", action="store_true", default=False,
                      help="Resize all the icons exceeding the max pixel size and exit")
    parser.add_option("-e", "--editor", default="/etc/alternatives/editor",
                      help="Specify editor to use in interactive mode. Default "+
                          "is /etc/alternatives/editor")
    parser.add_option("-w", "--wiki", default=False, action="store_true",
                      help="Update the wiki")
    parser.add_option("", "--pretty", action="store_true", default=False,
                      help="Produce human-readable index.xml")
    parser.add_option("--clean", action="store_true", default=False,
                      help="Clean update - don't uses caches, reprocess all apks")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    repodirs = ['repo']
    if config['archive_older'] != 0:
        repodirs.append('archive')
        if not os.path.exists('archive'):
            os.mkdir('archive')

    if options.icons:
        resize_all_icons(repodirs)
        sys.exit(0)

    # Get all apps...
    apps = metadata.read_metadata()

    # Generate a list of categories...
    categories = []
    for app in apps:
        cats = app['Categories'].split(',')
        for cat in cats:
            if cat not in categories:
                categories.append(cat)

    # Read known apks data (will be updated and written back when we've finished)
    knownapks = common.KnownApks()

    # Gather information about all the apk files in the repo directory, using
    # cached data if possible.
    apkcachefile = os.path.join('tmp', 'apkcache')
    if not options.clean and os.path.exists(apkcachefile):
        with open(apkcachefile, 'rb') as cf:
            apkcache = pickle.load(cf)
    else:
        apkcache = {}
    cachechanged = False

    delete_disabled_builds(apps, apkcache, repodirs)

    # Scan all apks in the main repo
    apks, cc = scan_apks(apps, apkcache, repodirs[0], knownapks)
    if cc:
        cachechanged = True

    # Scan the archive repo for apks as well
    if len(repodirs) > 1:
        archapks, cc = scan_apks(apps, apkcache, repodirs[1], knownapks)
        if cc:
            cachechanged = True
    else:
        archapks = []

    # Some information from the apks needs to be applied up to the application
    # level. When doing this, we use the info from the most recent version's apk.
    # We deal with figuring out when the app was added and last updated at the
    # same time.
    for app in apps:
        bestver = 0
        added = None
        lastupdated = None
        for apk in apks + archapks:
            if apk['id'] == app['id']:
                if apk['versioncode'] > bestver:
                    bestver = apk['versioncode']
                    bestapk = apk

                if 'added' in apk:
                    if not added or apk['added'] < added:
                        added = apk['added']
                    if not lastupdated or apk['added'] > lastupdated:
                        lastupdated = apk['added']

        if added:
            app['added'] = added
        else:
            if options.verbose:
                print "WARNING: Don't know when " + app['id'] + " was added"
        if lastupdated:
            app['lastupdated'] = lastupdated
        else:
            if options.verbose:
                print "WARNING: Don't know when " + app['id'] + " was last updated"

        if bestver == 0:
            if app['Name'] is None:
                app['Name'] = app['id']
            app['icon'] = None
            if options.verbose and app['Disabled'] is None:
                print "WARNING: Application " + app['id'] + " has no packages"
        else:
            if app['Name'] is None:
                app['Name'] = bestapk['name']
            app['icon'] = bestapk['icon'] if 'icon' in bestapk else None

    # Sort the app list by name, then the web site doesn't have to by default.
    # (we had to wait until we'd scanned the apks to do this, because mostly the
    # name comes from there!)
    apps = sorted(apps, key=lambda app: app['Name'].upper())

    # Generate warnings for apk's with no metadata (or create skeleton
    # metadata files, if requested on the command line)
    for apk in apks:
        found = False
        for app in apps:
            if app['id'] == apk['id']:
                found = True
                break
        if not found:
            if options.createmeta:
                f = open(os.path.join('metadata', apk['id'] + '.txt'), 'w')
                f.write("License:Unknown\n")
                f.write("Web Site:\n")
                f.write("Source Code:\n")
                f.write("Issue Tracker:\n")
                f.write("Summary:" + apk['name'] + "\n")
                f.write("Description:\n")
                f.write(apk['name'] + "\n")
                f.write(".\n")
                f.close()
                print "Generated skeleton metadata for " + apk['id']
            else:
                print "WARNING: " + apk['apkname'] + " (" + apk['id'] + ") has no metadata"
                print "       " + apk['name'] + " - " + apk['version']  

    if len(repodirs) > 1:
        archive_old_apks(apps, apks, repodirs[0], repodirs[1], config['archive_older'])

    # Make the index for the main repo...
    make_index(apps, apks, repodirs[0], False, categories)

    # If there's an archive repo,  make the index for it. We already scanned it
    # earlier on.
    if len(repodirs) > 1:
        make_index(apps, archapks, repodirs[1], True, categories)

    if config['update_stats']:

        # Update known apks info...
        knownapks.writeifchanged()

        # Generate latest apps data for widget
        if os.path.exists(os.path.join('stats', 'latestapps.txt')):
            data = ''
            for line in open(os.path.join('stats', 'latestapps.txt')):
                appid = line.rstrip()
                data += appid + "\t"
                for app in apps:
                    if app['id'] == appid:
                        data += app['Name'] + "\t"
                        if app['icon'] is not None:
                            data += app['icon'] + "\t"
                        data += app['License'] + "\n"
                        break
            f = open(os.path.join(repodirs[0], 'latestapps.dat'), 'w')
            f.write(data)
            f.close()

    if cachechanged:
        with open(apkcachefile, 'wb') as cf:
            pickle.dump(apkcache, cf)

    # Update the wiki...
    if options.wiki:
        update_wiki(apps, apks + archapks)

    print "Finished."
Example No. 27
def main():

    global config, options

    # Parse command line...
    parser = OptionParser(usage="Usage: %prog [options] "
                          "[APPID[:VERCODE] [APPID[:VERCODE] ...]]")
    parser.add_option("-v",
                      "--verbose",
                      action="store_true",
                      default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-q",
                      "--quiet",
                      action="store_true",
                      default=False,
                      help="Restrict output to warnings and errors")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    log_dir = 'logs'
    if not os.path.isdir(log_dir):
        logging.info("Creating log directory")
        os.makedirs(log_dir)

    tmp_dir = 'tmp'
    if not os.path.isdir(tmp_dir):
        logging.info("Creating temporary directory")
        os.makedirs(tmp_dir)

    output_dir = 'repo'
    if not os.path.isdir(output_dir):
        logging.info("Creating output directory")
        os.makedirs(output_dir)

    unsigned_dir = 'unsigned'
    if not os.path.isdir(unsigned_dir):
        logging.warning("No unsigned directory - nothing to do")
        sys.exit(1)

    for f in [
            config['keystorepassfile'], config['keystore'],
            config['keypassfile']
    ]:
        if not os.path.exists(f):
            logging.error("Config error - missing '{0}'".format(f))
            sys.exit(1)

    # It was suggested at
    #    https://dev.guardianproject.info/projects/bazaar/wiki/FDroid_Audit
    # that a package could be crafted, such that it would use the same signing
    # key as an existing app. While it may be theoretically possible for such a
    # colliding package ID to be generated, it seems virtually impossible that
    # the colliding ID would be something that would be a) a valid package ID,
    # and b) a sane-looking ID that would make its way into the repo.
    # Nonetheless, to be sure, before publishing we check that there are no
    # collisions, and refuse to do any publishing if that's the case...
    allapps = metadata.read_metadata()
    vercodes = common.read_pkg_args(args, True)
    allaliases = []
    for appid in allapps:
        m = md5.new()
        m.update(appid)
        keyalias = m.hexdigest()[:8]
        if keyalias in allaliases:
            logging.error("There is a keyalias collision - publishing halted")
            sys.exit(1)
        allaliases.append(keyalias)
    logging.info("{0} apps, {0} key aliases".format(len(allapps),
                                                    len(allaliases)))

    # Process any apks that are waiting to be signed...
    for apkfile in sorted(glob.glob(os.path.join(unsigned_dir, '*.apk'))):

        appid, vercode = common.apknameinfo(apkfile)
        apkfilename = os.path.basename(apkfile)
        if vercodes and appid not in vercodes:
            continue
        if appid in vercodes and vercodes[appid]:
            if vercode not in vercodes[appid]:
                continue
        logging.info("Processing " + apkfile)

        # There ought to be valid metadata for this app, otherwise why are we
        # trying to publish it?
        if appid not in allapps:
            logging.error("Unexpected {0} found in unsigned directory".format(
                apkfilename))
            sys.exit(1)
        app = allapps[appid]

        if app.get('Binaries', None):

            # It's an app where we build from source, and verify the apk
            # contents against a developer's binary, and then publish their
            # version if everything checks out.
            # The binary should already have been retrieved during the build
            # process.
            srcapk = apkfile + ".binary"

            # Compare our unsigned one with the downloaded one...
            compare_result = common.verify_apks(srcapk, apkfile, tmp_dir)
            if compare_result:
                logging.error("...verification failed - publish skipped : " +
                              compare_result)
                continue

            # Success! So move the downloaded file to the repo, and remove
            # our built version.
            shutil.move(srcapk, os.path.join(output_dir, apkfilename))
            os.remove(apkfile)

        else:

            # It's a 'normal' app, i.e. we sign and publish it...

            # Figure out the key alias name we'll use. Only the first 8
            # characters are significant, so we'll use the first 8 from
            # the MD5 of the app's ID and hope there are no collisions.
            # If a collision does occur later, we're going to have to
            # come up with a new algorithm, AND rename all existing keys
            # in the keystore!
            if appid in config['keyaliases']:
                # For this particular app, the key alias is overridden...
                keyalias = config['keyaliases'][appid]
                if keyalias.startswith('@'):
                    m = md5.new()
                    m.update(keyalias[1:])
                    keyalias = m.hexdigest()[:8]
            else:
                m = md5.new()
                m.update(appid)
                keyalias = m.hexdigest()[:8]
            logging.info("Key alias: " + keyalias)

            # See if we already have a key for this application, and
            # if not generate one...
            p = FDroidPopen([
                'keytool', '-list', '-alias', keyalias, '-keystore',
                config['keystore'], '-storepass:file',
                config['keystorepassfile']
            ])
            if p.returncode != 0:
                logging.info("Key does not exist - generating...")
                p = FDroidPopen([
                    'keytool', '-genkey', '-keystore', config['keystore'],
                    '-alias', keyalias, '-keyalg', 'RSA', '-keysize', '2048',
                    '-validity', '10000', '-storepass:file',
                    config['keystorepassfile'], '-keypass:file',
                    config['keypassfile'], '-dname', config['keydname']
                ])
                # TODO keypass should be sent via stdin
                if p.returncode != 0:
                    raise BuildException("Failed to generate key")

            # Sign the application...
            p = FDroidPopen([
                'jarsigner', '-keystore', config['keystore'],
                '-storepass:file', config['keystorepassfile'], '-keypass:file',
                config['keypassfile'], '-sigalg', 'MD5withRSA', '-digestalg',
                'SHA1', apkfile, keyalias
            ])
            # TODO keypass should be sent via stdin
            if p.returncode != 0:
                raise BuildException("Failed to sign application")

            # Zipalign it...
            p = SdkToolsPopen([
                'zipalign', '-v', '4', apkfile,
                os.path.join(output_dir, apkfilename)
            ])
            if p.returncode != 0:
                raise BuildException("Failed to align application")
            os.remove(apkfile)

        # Move the source tarball into the output directory...
        tarfilename = apkfilename[:-4] + '_src.tar.gz'
        tarfile = os.path.join(unsigned_dir, tarfilename)
        if os.path.exists(tarfile):
            shutil.move(tarfile, os.path.join(output_dir, tarfilename))

        logging.info('Published ' + apkfilename)
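
The eight-character key alias above is just the first 8 hex digits of the MD5 of the app ID (or of an overridden alias name). The md5 module used here is Python 2-only; a quick sketch of the same derivation with hashlib:

import hashlib

def key_alias(appid):
    # First 8 hex chars of the MD5 of the app ID, as in the publish loop above.
    return hashlib.md5(appid.encode('utf-8')).hexdigest()[:8]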
Example No. 28
def main():

    global config, options

    # Parse command line...
    parser = OptionParser()
    parser.add_option("-v",
                      "--verbose",
                      action="store_true",
                      default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-q",
                      "--quiet",
                      action="store_true",
                      default=False,
                      help="Restrict output to warnings and errors")
    parser.add_option("-u",
                      "--url",
                      default=None,
                      help="Project URL to import from.")
    parser.add_option(
        "-s",
        "--subdir",
        default=None,
        help="Path to main android project subdirectory, if not in root.")
    parser.add_option(
        "--rev",
        default=None,
        help="Allows a different revision (or git branch) to be specified for the initial import")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    if not options.url:
        logging.error("Specify project url.")
        sys.exit(1)
    url = options.url

    tmp_dir = 'tmp'
    if not os.path.isdir(tmp_dir):
        logging.info("Creating temporary directory")
        os.makedirs(tmp_dir)

    # Get all apps...
    apps = metadata.read_metadata()

    # Figure out what kind of project it is...
    projecttype = None
    issuetracker = None
    license = None
    website = url  # by default, we might override it
    if url.startswith('git://'):
        projecttype = 'git'
        repo = url
        repotype = 'git'
        sourcecode = ""
        website = ""
    elif url.startswith('https://github.com'):
        projecttype = 'github'
        repo = url
        repotype = 'git'
        sourcecode = url
        issuetracker = url + '/issues'
        website = ""
    elif url.startswith('https://gitlab.com/'):
        projecttype = 'gitlab'
        repo = url
        repotype = 'git'
        sourcecode = url + '/tree/HEAD'
        issuetracker = url + '/issues'
    elif url.startswith('https://bitbucket.org/'):
        if url.endswith('/'):
            url = url[:-1]
        projecttype = 'bitbucket'
        sourcecode = url + '/src'
        issuetracker = url + '/issues'
        # Figure out the repo type and address...
        repotype, repo = getrepofrompage(sourcecode)
        if not repotype:
            logging.error("Unable to determine vcs type. " + repo)
            sys.exit(1)
    if not projecttype:
        logging.error("Unable to determine the project type.")
        logging.error(
            "The URL you supplied was not in one of the supported formats. Please consult"
        )
        logging.error(
            "the manual for a list of supported formats, and supply one of those."
        )
        sys.exit(1)

    # Ensure we have a sensible-looking repo address at this point. If not, we
    # might have got a page format we weren't expecting. (Note that we
    # specifically don't want git@...)
    if ((repotype != 'bzr' and
         (not repo.startswith('http://') and not repo.startswith('https://')
          and not repo.startswith('git://'))) or ' ' in repo):
        logging.error(
            "Repo address '{0}' does not seem to be valid".format(repo))
        sys.exit(1)

    # Get a copy of the source so we can extract some info...
    logging.info('Getting source from ' + repotype + ' repo at ' + repo)
    src_dir = os.path.join(tmp_dir, 'importer')
    if os.path.exists(src_dir):
        shutil.rmtree(src_dir)
    vcs = common.getvcs(repotype, repo, src_dir)
    vcs.gotorevision(options.rev)
    if options.subdir:
        root_dir = os.path.join(src_dir, options.subdir)
    else:
        root_dir = src_dir

    # Extract some information...
    paths = common.manifest_paths(root_dir, [])
    if paths:

        version, vercode, package = common.parse_androidmanifests(paths)
        if not package:
            logging.error("Couldn't find package ID")
            sys.exit(1)
        if not version:
            logging.warn("Couldn't find latest version name")
        if not vercode:
            logging.warn("Couldn't find latest version code")
    else:
        spec = os.path.join(root_dir, 'buildozer.spec')
        if os.path.exists(spec):
            defaults = {
                'orientation': 'landscape',
                'icon': '',
                'permissions': '',
                'android.api': "18"
            }
            bconfig = ConfigParser(defaults, allow_no_value=True)
            bconfig.read(spec)
            package = bconfig.get('app', 'package.domain') + '.' + bconfig.get(
                'app', 'package.name')
            version = bconfig.get('app', 'version')
            vercode = None
        else:
            logging.error(
                "No android or kivy project could be found. Specify --subdir?")
            sys.exit(1)

    # Make sure it's actually new...
    if package in apps:
        logging.error("Package " + package + " already exists")
        sys.exit(1)

    # Construct the metadata...
    app = metadata.parse_metadata(None)[1]
    app['Web Site'] = website
    app['Source Code'] = sourcecode
    if issuetracker:
        app['Issue Tracker'] = issuetracker
    if license:
        app['License'] = license
    app['Repo Type'] = repotype
    app['Repo'] = repo
    app['Update Check Mode'] = "Tags"

    # Create a build line...
    build = {}
    build['version'] = version or '?'
    build['vercode'] = vercode or '?'
    build['commit'] = '?'
    build['disable'] = 'Generated by import.py - check/set version fields and commit id'
    if options.subdir:
        build['subdir'] = options.subdir
    if os.path.exists(os.path.join(root_dir, 'jni')):
        build['buildjni'] = ['yes']

    for flag, value in metadata.flag_defaults.iteritems():
        if flag in build:
            continue
        build[flag] = value

    app['builds'].append(build)

    # Keep the repo directory to save bandwidth...
    if not os.path.exists('build'):
        os.mkdir('build')
    shutil.move(src_dir, os.path.join('build', package))
    with open('build/.fdroidvcs-' + package, 'w') as f:
        f.write(repotype + ' ' + repo)

    metafile = os.path.join('metadata', package + '.txt')
    metadata.write_metadata(metafile, app)
    logging.info("Wrote " + metafile)
Example #30
def main():

    common.read_config(None)

    metadata.read_metadata(xref=True)
Example #31
def main():

    global config, options, curid, count
    curid = None

    count = Counter()

    def warn(message):
        global curid, count
        if curid:
            print "%s:" % curid
            curid = None
            count['app'] += 1
        print '    %s' % message
        count['warn'] += 1

    def pwarn(message):
        if options.pedantic:
            warn(message)

    # Parse command line...
    parser = OptionParser(usage="Usage: %prog [options] [APPID [APPID ...]]")
    parser.add_option("-v",
                      "--verbose",
                      action="store_true",
                      default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-q",
                      "--quiet",
                      action="store_true",
                      default=False,
                      help="Restrict output to warnings and errors")
    parser.add_option(
        "-p",
        "--pedantic",
        action="store_true",
        default=False,
        help="Show pedantic warnings that might give false positives")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    # Get all apps...
    allapps = metadata.read_metadata(xref=False)
    apps = common.read_app_args(args, allapps, False)

    for appid, app in apps.iteritems():
        curid = appid
        lastcommit = ''

        if app['Disabled']:
            continue

        for build in app['builds']:
            if build['commit'] and not build['disable']:
                lastcommit = build['commit']

        # Potentially incorrect UCM
        if (app['Update Check Mode'] == 'RepoManifest'
                and any(s in lastcommit for s in '.,_-/')):
            pwarn(
                "Last used commit '%s' looks like a tag, but Update Check Mode is '%s'"
                % (lastcommit, app['Update Check Mode']))

        # Summary size limit
        summ_chars = len(app['Summary'])
        if summ_chars > config['char_limits']['Summary']:
            warn("Summary of length %s is over the %i char limit" %
                 (summ_chars, config['char_limits']['Summary']))

        # Redundant info
        if app['Web Site'] and app['Source Code']:
            if app['Web Site'].lower() == app['Source Code'].lower():
                warn("Website '%s' is just the app's source code link" %
                     app['Web Site'])
                app['Web Site'] = ''

        name = app['Name'] or app['Auto Name']
        if app['Summary'] and name:
            if app['Summary'].lower() == name.lower():
                warn("Summary '%s' is just the app's name" % app['Summary'])

        if app['Summary'] and app['Description']:
            if app['Summary'].lower() == app['Description'][0].lower():
                warn("Description '%s' is just the app's summary" %
                     app['Summary'])

        # Description size limit
        desc_chars = sum(len(l) for l in app['Description'])
        if desc_chars > config['char_limits']['Description']:
            warn("Description of length %s is over the %i char limit" %
                 (desc_chars, config['char_limits']['Description']))

        # Regex checks in all kinds of fields
        for f in regex_warnings:
            for m, r in regex_warnings[f]:
                t = metadata.metafieldtype(f)
                if t == 'string':
                    if m.match(app[f]):
                        warn("%s '%s': %s" % (f, app[f], r))
                elif t == 'multiline':
                    for l in app[f]:
                        if m.match(l):
                            warn("%s at line '%s': %s" % (f, l, r))

        # Regex pedantic checks in all kinds of fields
        if options.pedantic:
            for f in regex_pedantic:
                for m, r in regex_pedantic[f]:
                    if m.match(app[f]):
                        warn("%s '%s': %s" % (f, app[f], r))

        # Build warnings
        for build in app['builds']:
            if build['disable']:
                continue
            for s in ['master', 'origin', 'HEAD', 'default', 'trunk']:
                if build['commit'] and build['commit'].startswith(s):
                    warn("Branch '%s' used as commit in build '%s'" %
                         (s, build['version']))
                for srclib in build['srclibs']:
                    ref = srclib.split('@')[1].split('/')[0]
                    if ref.startswith(s):
                        warn("Branch '%s' used as commit in srclib '%s'" %
                             (s, srclib))
            for s in [
                    'git clone', 'git svn clone', 'svn checkout', 'svn co',
                    'hg clone'
            ]:
                for flag in ['init', 'prebuild', 'build']:
                    if not build[flag]:
                        continue
                    if s in build[flag]:
                        # TODO: This should not be pedantic!
                        pwarn("'%s' used in %s '%s'" % (s, flag, build[flag]))

        if not curid:
            print

    logging.info("Found a total of %i warnings in %i apps." %
                 (count['warn'], count['app']))
Example #32
def uploader():
    """main() - parse args, make Zenodo uploader, execute, catch errors"""

    parser = argparse.ArgumentParser()

    # zenodo / administrative matters
    parser.add_argument("-z", "--zenodo_id", help="zenodo upload key")
    parser.add_argument("-s",
                        "--sandbox",
                        help="use sandbox mode",
                        action="store_true")

    # upload metadata - title, authors, keywords, description, metafile
    parser.add_argument("-m", "--metadata", help="json metadata file")
    parser.add_argument("-T", "--title", help="upload title")
    parser.add_argument("-C",
                        "--creator",
                        help="creator name e.g. Public, Joe Q.",
                        action="append")
    parser.add_argument("-A",
                        "--affiliation",
                        help="creator affiliation",
                        action="append")
    parser.add_argument("-K",
                        "--keyword",
                        help="keyword to associate",
                        action="append")
    parser.add_argument("-D", "--description", help="description")

    # file related stuff
    parser.add_argument("-d",
                        "--directory",
                        help="directory to upload",
                        action="append")
    parser.add_argument("files", nargs="*", help="individual files")
    parser.add_argument(
        "-x",
        "--checksum",
        help="compute md5 checksum of uploaded files",
        action="store_true",
    )

    # what we are doing with the files
    parser.add_argument(
        "-a",
        "--archive",
        help="pack directory to named archive before upload",
        action="append",
    )
    args = parser.parse_args()

    # validate metadata - allow file read and update from command line
    # (with that priority)
    if args.metadata:
        metadata = read_metadata(args.metadata)
    else:
        metadata = {}

    # pull files or directory (&c.) from metadata file if that is where they are
    if not args.directory and "directory" in metadata:
        args.directory = metadata["directory"]
        del metadata["directory"]

    if not args.files and "files" in metadata:
        args.files = metadata["files"]
        del metadata["files"]

    if not args.archive and "archive" in metadata:
        args.archive = metadata["archive"]
        del metadata["archive"]

    # validate inputs - must pass some files, only pass files _or_ directories
    if not args.directory and not args.files:
        sys.exit("must pass some files for upload")
    if args.directory and args.files:
        sys.exit("only pass files or directories")

    if args.archive:
        if args.files and len(args.archive) != 1:
            sys.exit(
                "if passing individual files and archive, only one archive allowed"
            )
        if args.directory and len(args.directory) != len(args.archive):
            sys.exit("number of archives must equal number of directories")

    # check that we can guess what format to use for archives
    if args.archive:
        for archive in args.archive:
            if not archive.endswith((".zip", ".tar.gz")):
                sys.exit("unknown archive type for %s" % archive)

    if not args.zenodo_id:
        args.zenodo_id = get_access_token(sandbox=args.sandbox)

    cl_metadata = make_metadata(args.title, args.description, args.creator,
                                args.affiliation, args.keyword)

    metadata.update(cl_metadata)
    validate_metadata(metadata)

    # explain what we are going to do
    print("ID: %s" % args.zenodo_id)

    # prepare archives / files for upload
    uploads = []

    if args.archive:
        if args.files:
            uploads.append(packup(args.archive[0], args.files))
        else:
            for archive, directory in zip(args.archive, args.directory):
                files = [
                    os.path.join(directory, filename)
                    for filename in os.listdir(directory)
                    if os.path.isfile(os.path.join(directory, filename))
                ]
                uploads.append(packup(archive, files))

    elif args.directory:
        for directory in args.directory:
            uploads.extend([
                os.path.join(directory, filename)
                for filename in os.listdir(directory)
                if os.path.isfile(os.path.join(directory, filename))
            ])
    else:
        uploads.extend(args.files)

    # metadata
    print_metadata(metadata)

    # files
    print("Upload consists of:")
    for upload in uploads:
        print(upload)
        if args.checksum:
            print("md5:%s" % md5(upload))

    # make and act on
    zenodo_uploader = ZenodoUploader(uploads, metadata, args.zenodo_id,
                                     args.sandbox)
    zenodo_uploader.upload()
    print("Upload complete for deposition %s" %
          str(zenodo_uploader.get_deposition()))
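
A hypothetical invocation of the uploader, assuming the script is saved as uploader.py: it reads title/creator fields from a JSON metadata file, packs one directory into one archive (the counts must match), and prints an md5 checksum for each upload:

python uploader.py -m metadata.json -d results -a results.tar.gz -x
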
Example #33
def main():

    global config, options

    # Parse command line...
    parser = ArgumentParser(usage="%(prog)s [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]")
    common.setup_global_opts(parser)
    parser.add_argument("appid", nargs='*', help="app-id with optional versioncode in the form APPID[:VERCODE]")
    options = parser.parse_args()

    config = common.read_config(options)

    # Read all app and srclib metadata
    allapps = metadata.read_metadata()
    apps = common.read_app_args(options.appid, allapps, True)

    probcount = 0

    build_dir = 'build'
    if not os.path.isdir(build_dir):
        logging.info("Creating build directory")
        os.makedirs(build_dir)
    srclib_dir = os.path.join(build_dir, 'srclib')
    extlib_dir = os.path.join(build_dir, 'extlib')

    for appid, app in apps.iteritems():

        if app.Disabled:
            logging.info("Skipping %s: disabled" % appid)
            continue
        if not app.builds:
            logging.info("Skipping %s: no builds specified" % appid)
            continue

        logging.info("Processing " + appid)

        try:

            if app.RepoType == 'srclib':
                build_dir = os.path.join('build', 'srclib', app.Repo)
            else:
                build_dir = os.path.join('build', appid)

            # Set up vcs interface and make sure we have the latest code...
            vcs = common.getvcs(app.RepoType, app.Repo, build_dir)

            for build in app.builds:

                if build.disable:
                    logging.info("...skipping version %s - %s" % (
                        build.version, build.get('disable', build.commit[1:])))
                else:
                    logging.info("...scanning version " + build.version)

                    # Prepare the source code...
                    root_dir, _ = common.prepare_source(vcs, app, build,
                                                        build_dir, srclib_dir,
                                                        extlib_dir, False)

                    # Do the scan...
                    count = scan_source(build_dir, root_dir, build)
                    if count > 0:
                        logging.warn('Scanner found %d problems in %s (%s)' % (
                            count, appid, build.vercode))
                        probcount += count

        except BuildException as be:
            logging.warn("Could not scan app %s due to BuildException: %s" % (
                appid, be))
            probcount += 1
        except VCSException as vcse:
            logging.warn("VCS error while scanning app %s: %s" % (appid, vcse))
            probcount += 1
        except Exception:
            logging.warn("Could not scan app %s due to unknown error: %s" % (
                appid, traceback.format_exc()))
            probcount += 1

    logging.info("Finished:")
    print("%d problems found" % probcount)
Example #34
def main():

    global options, config

    options, parser = parse_commandline()

    metadata_files = glob.glob('.fdroid.*[a-z]')  # ignore files ending in ~
    if os.path.isdir('metadata'):
        pass
    elif len(metadata_files) == 0:
        raise FDroidException("No app metadata found, nothing to process!")
    elif len(metadata_files) > 1:
        raise FDroidException("Only one local metadata file allowed! Found: "
                              + " ".join(metadata_files))

    if not options.appid and not options.all:
        parser.error("option %s: If you really want to build all the apps, use --all" % "all")

    config = common.read_config(options)

    if config['build_server_always']:
        options.server = True
    if options.resetserver and not options.server:
        parser.error("option %s: Using --resetserver without --server makes no sense" % "resetserver")

    log_dir = 'logs'
    if not os.path.isdir(log_dir):
        logging.info("Creating log directory")
        os.makedirs(log_dir)

    tmp_dir = 'tmp'
    if not os.path.isdir(tmp_dir):
        logging.info("Creating temporary directory")
        os.makedirs(tmp_dir)

    if options.test:
        output_dir = tmp_dir
    else:
        output_dir = 'unsigned'
        if not os.path.isdir(output_dir):
            logging.info("Creating output directory")
            os.makedirs(output_dir)

    if config['archive_older'] != 0:
        also_check_dir = 'archive'
    else:
        also_check_dir = None

    repo_dir = 'repo'

    build_dir = 'build'
    if not os.path.isdir(build_dir):
        logging.info("Creating build directory")
        os.makedirs(build_dir)
    srclib_dir = os.path.join(build_dir, 'srclib')
    extlib_dir = os.path.join(build_dir, 'extlib')

    # Read all app and srclib metadata
    allapps = metadata.read_metadata(xref=not options.onserver)

    apps = common.read_app_args(options.appid, allapps, True)
    for appid, app in apps.items():
        if (app.Disabled and not options.force) or not app.RepoType or not app.builds:
            del apps[appid]

    if not apps:
        raise FDroidException("No apps to process.")

    if options.latest:
        for app in apps.itervalues():
            for build in reversed(app.builds):
                if build.disable and not options.force:
                    continue
                app.builds = [build]
                break

    if options.wiki:
        import mwclient
        site = mwclient.Site((config['wiki_protocol'], config['wiki_server']),
                             path=config['wiki_path'])
        site.login(config['wiki_user'], config['wiki_password'])

    # Build applications...
    failed_apps = {}
    build_succeeded = []
    for appid, app in apps.iteritems():

        first = True

        for build in app.builds:
            wikilog = None
            try:

                # For the first build of a particular app, we need to set up
                # the source repo. We can reuse it on subsequent builds, if
                # there are any.
                if first:
                    if app.RepoType == 'srclib':
                        build_dir = os.path.join('build', 'srclib', app.Repo)
                    else:
                        build_dir = os.path.join('build', appid)

                    # Set up vcs interface and make sure we have the latest code...
                    logging.debug("Getting {0} vcs interface for {1}"
                                  .format(app.RepoType, app.Repo))
                    vcs = common.getvcs(app.RepoType, app.Repo, build_dir)

                    first = False

                logging.debug("Checking " + build.version)
                if trybuild(app, build, build_dir, output_dir,
                            also_check_dir, srclib_dir, extlib_dir,
                            tmp_dir, repo_dir, vcs, options.test,
                            options.server, options.force,
                            options.onserver, options.refresh):

                    if app.Binaries is not None:
                        # This is an app where we build from source, and
                        # verify the apk contents against a developer's
                        # binary. We get that binary now, and save it
                        # alongside our built one in the 'unsigned'
                        # directory.
                        url = app.Binaries
                        url = url.replace('%v', build.version)
                        url = url.replace('%c', str(build.vercode))
                        logging.info("...retrieving " + url)
                        of = "{0}_{1}.apk.binary".format(app.id, build.vercode)
                        of = os.path.join(output_dir, of)
                        net.download_file(url, local_filename=of)

                    build_succeeded.append(app)
                    wikilog = "Build succeeded"
            except VCSException as vcse:
                reason = str(vcse).split('\n', 1)[0] if options.verbose else str(vcse)
                logging.error("VCS error while building app %s: %s" % (
                    appid, reason))
                if options.stop:
                    sys.exit(1)
                failed_apps[appid] = vcse
                wikilog = str(vcse)
            except FDroidException as e:
                with open(os.path.join(log_dir, appid + '.log'), 'a+') as f:
                    f.write(str(e))
                logging.error("Could not build app %s: %s" % (appid, e))
                if options.stop:
                    sys.exit(1)
                failed_apps[appid] = e
                wikilog = e.get_wikitext()
            except Exception as e:
                logging.error("Could not build app %s due to unknown error: %s" % (
                    appid, traceback.format_exc()))
                if options.stop:
                    sys.exit(1)
                failed_apps[appid] = e
                wikilog = str(e)

            if options.wiki and wikilog:
                try:
                    # Write a page with the last build log for this version code
                    lastbuildpage = appid + '/lastbuild_' + build.vercode
                    newpage = site.Pages[lastbuildpage]
                    txt = "Build completed at " + time.strftime("%Y-%m-%d %H:%M:%SZ", time.gmtime()) + "\n\n" + wikilog
                    newpage.save(txt, summary='Build log')
                    # Redirect from /lastbuild to the most recent build log
                    newpage = site.Pages[appid + '/lastbuild']
                    newpage.save('#REDIRECT [[' + lastbuildpage + ']]', summary='Update redirect')
                except Exception:
                    logging.error("Error while attempting to publish build log")

    for app in build_succeeded:
        logging.info("success: %s" % (app.id))

    if not options.verbose:
        for fa in failed_apps:
            logging.info("Build for app %s failed:\n%s" % (fa, failed_apps[fa]))

    logging.info("Finished.")
    if len(build_succeeded) > 0:
        logging.info(str(len(build_succeeded)) + ' builds succeeded')
    if len(failed_apps) > 0:
        logging.info(str(len(failed_apps)) + ' builds failed')

    sys.exit(0)
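
The option handling here (parse_commandline) is not shown in this excerpt, but from the flags referenced above a hypothetical invocation would look like this, building only the most recent enabled build of one app:

python build.py --latest org.example.app
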
Example #35
def read_test_metadata():
    car_metadata = "/Users/salspaugh/classes/visualization/project/aptremake/specs/json/cars_coded.spec"
    return read_metadata(car_metadata) 
Example #36
def main():

    global config, options

    # Parse command line...
    parser = ArgumentParser(usage="%(prog)s [options] [APPID [APPID ...]]")
    common.setup_global_opts(parser)
    parser.add_argument("appid", nargs='*', help="app-id to check for updates")
    parser.add_argument("--auto", action="store_true", default=False,
                        help="Process auto-updates")
    parser.add_argument("--autoonly", action="store_true", default=False,
                        help="Only process apps with auto-updates")
    parser.add_argument("--commit", action="store_true", default=False,
                        help="Commit changes")
    parser.add_argument("--gplay", action="store_true", default=False,
                        help="Only print differences with the Play Store")
    options = parser.parse_args()

    config = common.read_config(options)

    # Get all apps...
    allapps = metadata.read_metadata()

    apps = common.read_app_args(options.appid, allapps, False)

    if options.gplay:
        for app in apps:
            version, reason = check_gplay(app)
            if version is None:
                if reason == '404':
                    logging.info("{0} is not in the Play Store".format(common.getappname(app)))
                else:
                    logging.info("{0} encountered a problem: {1}".format(common.getappname(app), reason))
            if version is not None:
                stored = app.CurrentVersion
                if not stored:
                    logging.info("{0} has no Current Version but has version {1} on the Play Store"
                                 .format(common.getappname(app), version))
                elif LooseVersion(stored) < LooseVersion(version):
                    logging.info("{0} has version {1} on the Play Store, which is bigger than {2}"
                                 .format(common.getappname(app), version, stored))
                else:
                    if stored != version:
                        logging.info("{0} has version {1} on the Play Store, which differs from {2}"
                                     .format(common.getappname(app), version, stored))
                    else:
                        logging.info("{0} has the same version {1} on the Play Store"
                                     .format(common.getappname(app), version))
        return

    for appid, app in apps.iteritems():

        if options.autoonly and app.AutoUpdateMode in ('None', 'Static'):
            logging.debug("Nothing to do for {0}...".format(appid))
            continue

        logging.info("Processing " + appid + '...')

        checkupdates_app(app)

    logging.info("Finished.")
Example #37
def main():

    global options, config

    # Parse command line...
    parser = OptionParser()
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-d", "--download", action="store_true", default=False,
                      help="Download logs we don't have")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    if not config['update_stats']:
        print "Stats are disabled - check your configuration"
        sys.exit(1)

    # Get all metadata-defined apps...
    metaapps = metadata.read_metadata(options.verbose)

    statsdir = 'stats'
    logsdir = os.path.join(statsdir, 'logs')
    datadir = os.path.join(statsdir, 'data')
    if not os.path.exists(statsdir):
        os.mkdir(statsdir)
    if not os.path.exists(logsdir):
        os.mkdir(logsdir)
    if not os.path.exists(datadir):
        os.mkdir(datadir)

    if options.download:
        # Get any access logs we don't have...
        ssh = None
        ftp = None
        try:
            print 'Retrieving logs'
            ssh = paramiko.SSHClient()
            ssh.load_system_host_keys()
            ssh.connect('f-droid.org', username='******', timeout=10,
                    key_filename=config['webserver_keyfile'])
            ftp = ssh.open_sftp()
            ftp.get_channel().settimeout(60)
            print "...connected"

            ftp.chdir('logs')
            files = ftp.listdir()
            for f in files:
                if f.startswith('access-') and f.endswith('.log.gz'):

                    destpath = os.path.join(logsdir, f)
                    destsize = ftp.stat(f).st_size
                    if (not os.path.exists(destpath) or
                            os.path.getsize(destpath) != destsize):
                        print "...retrieving " + f
                        ftp.get(f, destpath)
        except Exception:
            traceback.print_exc()
            sys.exit(1)
        finally:
            # Disconnect
            if ftp is not None:
                ftp.close()
            if ssh is not None:
                ssh.close()

    # Process logs
    if options.verbose:
        print 'Processing logs...'
    logexpr = '(?P<ip>[.:0-9a-fA-F]+) - - \[(?P<time>.*?)\] "GET (?P<uri>.*?) HTTP/1.\d" (?P<statuscode>\d+) \d+ "(?P<referral>.*?)" "(?P<useragent>.*?)"'
    logsearch = re.compile(logexpr).search
    apps = {}
    unknownapks = []
    knownapks = common.KnownApks()
    for logfile in glob.glob(os.path.join(logsdir, 'access-*.log.gz')):
        if options.verbose:
            print '...' + logfile
        p = subprocess.Popen(["zcat", logfile], stdout=subprocess.PIPE)
        matches = (logsearch(line) for line in p.stdout)
        for match in matches:
            if match and match.group('statuscode') == '200':
                uri = match.group('uri')
                if uri.endswith('.apk'):
                    _, apkname = os.path.split(uri)
                    app = knownapks.getapp(apkname)
                    if app:
                        appid, _ = app
                        if appid in apps:
                            apps[appid] += 1
                        else:
                            apps[appid] = 1
                    else:
                        if apkname not in unknownapks:
                            unknownapks.append(apkname)

    # Calculate and write stats for total downloads...
    lst = []
    alldownloads = 0
    for app, count in apps.iteritems():
        lst.append(app + " " + str(count))
        if config['stats_to_carbon']:
            carbon_send('fdroid.download.' + app.replace('.', '_'), count)
        alldownloads += count
    lst.append("ALL " + str(alldownloads))
    f = open('stats/total_downloads_app.txt', 'w')
    f.write('# Total downloads by application, since October 2011\n')
    for line in sorted(lst):
        f.write(line + '\n')
    f.close()

    # Calculate and write stats for repo types...
    repotypes = {}
    for app in metaapps:
        if len(app['Repo Type']) == 0:
            rtype = 'none'
        else:
            if app['Repo Type'] == 'srclib':
                rtype = common.getsrclibvcs(app['Repo'])
            else:
                rtype = app['Repo Type']
        if rtype in repotypes:
            repotypes[rtype] += 1
        else:
            repotypes[rtype] = 1
    f = open('stats/repotypes.txt', 'w')
    for rtype, count in repotypes.iteritems():
        f.write(rtype + ' ' + str(count) + '\n')
    f.close()

    # Calculate and write stats for update check modes...
    ucms = {}
    for app in metaapps:
        checkmode = app['Update Check Mode'].split('/')[0]
        if checkmode in ucms:
            ucms[checkmode] += 1
        else:
            ucms[checkmode] = 1
    f = open('stats/update_check_modes.txt', 'w')
    for checkmode, count in ucms.iteritems():
        f.write(checkmode + ' ' + str(count) + '\n')
    f.close()

    ctgs = {}
    for app in metaapps:
        if app['Categories'] is None:
            continue
        categories = [c.strip() for c in app['Categories'].split(',')]
        for category in categories:
            if category in ctgs:
                ctgs[category] += 1
            else:
                ctgs[category] = 1
    f = open('stats/categories.txt', 'w')
    for category, count in ctgs.iteritems():
        f.write(category + ' ' + str(count) + '\n')
    f.close()

    afs = {}
    for app in metaapps:
        if app['AntiFeatures'] is None:
            continue
        antifeatures = [a.strip() for a in app['AntiFeatures'].split(',')]
        for antifeature in antifeatures:
            if antifeature in afs:
                afs[antifeature] += 1
            else:
                afs[antifeature] = 1
    f = open('stats/antifeatures.txt', 'w')
    for antifeature, count in afs.iteritems():
        f.write(antifeature + ' ' + str(count) + '\n')
    f.close()

    # Calculate and write stats for licenses...
    licenses = {}
    for app in metaapps:
        license = app['License']
        if license in licenses:
            licenses[license] += 1
        else:
            licenses[license] = 1
    f = open('stats/licenses.txt', 'w')
    for license, count in licenses.iteritems():
        f.write(license + ' ' + str(count) + '\n')
    f.close()

    # Write list of latest apps added to the repo...
    latest = knownapks.getlatest(10)
    f = open('stats/latestapps.txt', 'w')
    for app in latest:
        f.write(app + '\n')
    f.close()

    if len(unknownapks) > 0:
        print '\nUnknown apks:'
        for apk in unknownapks:
            print apk

    print "Finished."
Example #38
def main():

    global config, options, curid, count
    curid = None

    count = Counter()

    def warn(message):
        global curid, count
        if curid:
            print "%s:" % curid
            curid = None
            count['app'] += 1
        print '    %s' % message
        count['warn'] += 1

    # Parse command line...
    parser = OptionParser(usage="Usage: %prog [options] [APPID [APPID ...]]")
    parser.add_option("-v",
                      "--verbose",
                      action="store_true",
                      default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-q",
                      "--quiet",
                      action="store_true",
                      default=False,
                      help="Restrict output to warnings and errors")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    # Get all apps...
    allapps = metadata.read_metadata(xref=False)
    apps = common.read_app_args(args, allapps, False)

    filling_ucms = re.compile('^(Tags.*|RepoManifest.*)')

    for appid, app in apps.iteritems():
        if app['Disabled']:
            continue

        curid = appid
        count['app_total'] += 1

        # enabled_builds = 0
        lowest_vercode = -1
        curbuild = None
        for build in app['builds']:
            if not build['disable']:
                # enabled_builds += 1
                vercode = int(build['vercode'])
                if lowest_vercode == -1 or vercode < lowest_vercode:
                    lowest_vercode = vercode
            if not curbuild or int(build['vercode']) > int(
                    curbuild['vercode']):
                curbuild = build

        # Incorrect UCM
        if (curbuild and curbuild['commit']
                and app['Update Check Mode'] == 'RepoManifest'
                and not curbuild['commit'].startswith('unknown')
                and curbuild['vercode'] == app['Current Version Code']
                and not curbuild['forcevercode']
                and any(s in curbuild['commit'] for s in '.,_-/')):
            warn(
                "Last used commit '%s' looks like a tag, but Update Check Mode is '%s'"
                % (curbuild['commit'], app['Update Check Mode']))

        # Summary size limit
        summ_chars = len(app['Summary'])
        if summ_chars > config['char_limits']['Summary']:
            warn("Summary of length %s is over the %i char limit" %
                 (summ_chars, config['char_limits']['Summary']))

        # Redundant info
        if app['Web Site'] and app['Source Code']:
            if app['Web Site'].lower() == app['Source Code'].lower():
                warn("Website '%s' is just the app's source code link" %
                     app['Web Site'])

        if filling_ucms.match(app['Update Check Mode']):
            if all(app[f] == metadata.app_defaults[f] for f in [
                    'Auto Name',
                    'Current Version',
                    'Current Version Code',
            ]):
                warn(
                    "UCM is set but it looks like checkupdates hasn't been run yet"
                )

        if app['Update Check Name'] == appid:
            warn(
                "Update Check Name is set to the known app id - it can be removed"
            )

        cvc = int(app['Current Version Code'])
        if cvc > 0 and cvc < lowest_vercode:
            warn("Current Version Code is lower than any enabled build")

        # Missing or incorrect categories
        if not app['Categories']:
            warn("Categories are not set")
        for categ in app['Categories']:
            if categ not in categories:
                warn("Category '%s' is not valid" % categ)

        if app['Name'] and app['Name'] == app['Auto Name']:
            warn("Name '%s' is just the auto name" % app['Name'])

        name = app['Name'] or app['Auto Name']
        if app['Summary'] and name:
            if app['Summary'].lower() == name.lower():
                warn("Summary '%s' is just the app's name" % app['Summary'])

        desc = app['Description']
        if app['Summary'] and desc and len(desc) == 1:
            if app['Summary'].lower() == desc[0].lower():
                warn("Description '%s' is just the app's summary" %
                     app['Summary'])

        # Description size limit
        desc_charcount = sum(len(l) for l in desc)
        if desc_charcount > config['char_limits']['Description']:
            warn("Description of length %s is over the %i char limit" %
                 (desc_charcount, config['char_limits']['Description']))

        if (not desc[0] or not desc[-1] or any(not desc[l - 1] and not desc[l]
                                               for l in range(1, len(desc)))):
            warn("Description has an extra empty line")

        # Check for lists using the wrong characters
        validchars = ['*', '#']
        lchar = ''
        lcount = 0
        for l in app['Description']:
            if len(l) < 1:
                continue

            for um in desc_url.finditer(l):
                url = um.group(1)
                for m, r in http_warnings:
                    if m.match(url):
                        warn("URL '%s' in Description: %s" % (url, r))

            c = l.decode('utf-8')[0]
            if c == lchar:
                lcount += 1
                if lcount > 3 and lchar not in validchars:
                    warn(
                        "Description has a list (%s) but it isn't bulleted (*) nor numbered (#)"
                        % lchar)
                    break
            else:
                lchar = c
                lcount = 1

        # Regex checks in all kinds of fields
        for f in regex_warnings:
            for m, r in regex_warnings[f]:
                v = app[f]
                if v is None:
                    continue
                if type(v) == str:
                    if m.match(v):
                        warn("%s '%s': %s" % (f, v, r))
                elif type(v) == list:
                    for l in v:
                        if m.match(l):
                            warn("%s at line '%s': %s" % (f, l, r))

        # Build warnings
        for build in app['builds']:
            if build['disable']:
                continue
            for s in ['master', 'origin', 'HEAD', 'default', 'trunk']:
                if build['commit'] and build['commit'].startswith(s):
                    warn("Branch '%s' used as commit in build '%s'" %
                         (s, build['version']))
                for srclib in build['srclibs']:
                    ref = srclib.split('@')[1].split('/')[0]
                    if ref.startswith(s):
                        warn("Branch '%s' used as commit in srclib '%s'" %
                             (s, srclib))

        if not curid:
            print

    logging.info("Found a total of %i warnings in %i apps out of %i total." %
                 (count['warn'], count['app'], count['app_total']))

    sys.exit(1 if count['warn'] > 0 else 0)
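
Since the linter exits nonzero whenever a warning was printed (the sys.exit above), it can gate a CI job directly. A hypothetical invocation, assuming the script is saved as lint.py:

python lint.py org.example.app
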
Example #39
def main():

    global config, options

    # Parse command line...
    parser = OptionParser(usage="Usage: %prog [options] [APPID [APPID ...]]")
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-q", "--quiet", action="store_true", default=False,
                      help="Restrict output to warnings and errors")
    parser.add_option("--auto", action="store_true", default=False,
                      help="Process auto-updates")
    parser.add_option("--autoonly", action="store_true", default=False,
                      help="Only process apps with auto-updates")
    parser.add_option("--commit", action="store_true", default=False,
                      help="Commit changes")
    parser.add_option("--gplay", action="store_true", default=False,
                      help="Only print differences with the Play Store")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    # Get all apps...
    allapps = metadata.read_metadata()

    apps = common.read_app_args(args, allapps, False)

    if options.gplay:
        for app in apps:
            version, reason = check_gplay(app)
            if version is None:
                if reason == '404':
                    logging.info("{0} is not in the Play Store".format(common.getappname(app)))
                else:
                    logging.info("{0} encountered a problem: {1}".format(common.getappname(app), reason))
            if version is not None:
                stored = app['Current Version']
                if not stored:
                    logging.info("{0} has no Current Version but has version {1} on the Play Store"
                                 .format(common.getappname(app), version))
                elif LooseVersion(stored) < LooseVersion(version):
                    logging.info("{0} has version {1} on the Play Store, which is bigger than {2}"
                                 .format(common.getappname(app), version, stored))
                else:
                    if stored != version:
                        logging.info("{0} has version {1} on the Play Store, which differs from {2}"
                                     .format(common.getappname(app), version, stored))
                    else:
                        logging.info("{0} has the same version {1} on the Play Store"
                                     .format(common.getappname(app), version))
        return

    for appid, app in apps.iteritems():

        if options.autoonly and app['Auto Update Mode'] in ('None', 'Static'):
            logging.debug("Nothing to do for {0}...".format(appid))
            continue

        logging.info("Processing " + appid + '...')

        checkupdates_app(app)

    logging.info("Finished.")
Example #40
def main():

    global options, config

    # Parse command line...
    parser = ArgumentParser()
    common.setup_global_opts(parser)
    parser.add_argument("-d", "--download", action="store_true", default=False,
                        help="Download logs we don't have")
    parser.add_argument("--recalc", action="store_true", default=False,
                        help="Recalculate aggregate stats - use when changes "
                        "have been made that would invalidate old cached data.")
    parser.add_argument("--nologs", action="store_true", default=False,
                        help="Don't do anything logs-related")
    options = parser.parse_args()

    config = common.read_config(options)

    if not config['update_stats']:
        logging.info("Stats are disabled - set \"update_stats = True\" in your config.py")
        sys.exit(1)

    # Get all metadata-defined apps...
    allmetaapps = [app for app in metadata.read_metadata().itervalues()]
    metaapps = [app for app in allmetaapps if not app.Disabled]

    statsdir = 'stats'
    logsdir = os.path.join(statsdir, 'logs')
    datadir = os.path.join(statsdir, 'data')
    if not os.path.exists(statsdir):
        os.mkdir(statsdir)
    if not os.path.exists(logsdir):
        os.mkdir(logsdir)
    if not os.path.exists(datadir):
        os.mkdir(datadir)

    if options.download:
        # Get any access logs we don't have...
        ssh = None
        ftp = None
        try:
            logging.info('Retrieving logs')
            ssh = paramiko.SSHClient()
            ssh.load_system_host_keys()
            ssh.connect(config['stats_server'], username=config['stats_user'],
                        timeout=10, key_filename=config['webserver_keyfile'])
            ftp = ssh.open_sftp()
            ftp.get_channel().settimeout(60)
            logging.info("...connected")

            ftp.chdir('logs')
            files = ftp.listdir()
            for f in files:
                if f.startswith('access-') and f.endswith('.log.gz'):

                    destpath = os.path.join(logsdir, f)
                    destsize = ftp.stat(f).st_size
                    if (not os.path.exists(destpath) or
                            os.path.getsize(destpath) != destsize):
                        logging.debug("...retrieving " + f)
                        ftp.get(f, destpath)
        except Exception:
            traceback.print_exc()
            sys.exit(1)
        finally:
            # Disconnect
            if ftp is not None:
                ftp.close()
            if ssh is not None:
                ssh.close()

    knownapks = common.KnownApks()
    unknownapks = []

    if not options.nologs:
        # Process logs
        logging.info('Processing logs...')
        appscount = Counter()
        appsvercount = Counter()
        logexpr = '(?P<ip>[.:0-9a-fA-F]+) - - \[(?P<time>.*?)\] ' + \
            '"GET (?P<uri>.*?) HTTP/1.\d" (?P<statuscode>\d+) ' + \
            '\d+ "(?P<referral>.*?)" "(?P<useragent>.*?)"'
        logsearch = re.compile(logexpr).search
        for logfile in glob.glob(os.path.join(logsdir, 'access-*.log.gz')):
            logging.debug('...' + logfile)

            # Get the date for this log - e.g. 2012-02-28
            thisdate = os.path.basename(logfile)[7:-7]

            agg_path = os.path.join(datadir, thisdate + '.json')
            if not options.recalc and os.path.exists(agg_path):
                # Use previously calculated aggregate data
                with open(agg_path, 'r') as f:
                    today = json.load(f)

            else:
                # Calculate from logs...

                today = {
                    'apps': Counter(),
                    'appsver': Counter(),
                    'unknown': []
                }

                p = subprocess.Popen(["zcat", logfile], stdout=subprocess.PIPE)
                matches = (logsearch(line) for line in p.stdout)
                for match in matches:
                    if not match:
                        continue
                    if match.group('statuscode') != '200':
                        continue
                    if match.group('ip') in config['stats_ignore']:
                        continue
                    uri = match.group('uri')
                    if not uri.endswith('.apk'):
                        continue
                    _, apkname = os.path.split(uri)
                    app = knownapks.getapp(apkname)
                    if app:
                        appid, _ = app
                        today['apps'][appid] += 1
                        # Strip the '.apk' from apkname
                        appver = apkname[:-4]
                        today['appsver'][appver] += 1
                    else:
                        if apkname not in today['unknown']:
                            today['unknown'].append(apkname)

                # Save calculated aggregate data for today to cache
                with open(agg_path, 'w') as f:
                    json.dump(today, f)

            # Add today's stats (whether cached or recalculated) to the total
            for appid in today['apps']:
                appscount[appid] += today['apps'][appid]
            for appid in today['appsver']:
                appsvercount[appid] += today['appsver'][appid]
            for uk in today['unknown']:
                if uk not in unknownapks:
                    unknownapks.append(uk)

        # Calculate and write stats for total downloads...
        lst = []
        alldownloads = 0
        for appid in appscount:
            count = appscount[appid]
            lst.append(appid + " " + str(count))
            if config['stats_to_carbon']:
                carbon_send('fdroid.download.' + appid.replace('.', '_'),
                            count)
            alldownloads += count
        lst.append("ALL " + str(alldownloads))
        with open(os.path.join(statsdir, 'total_downloads_app.txt'), 'w') as f:
            f.write('# Total downloads by application, since October 2011\n')
            for line in sorted(lst):
                f.write(line + '\n')

        lst = []
        for appver in appsvercount:
            count = appsvercount[appver]
            lst.append(appver + " " + str(count))

        with open(os.path.join(statsdir, 'total_downloads_app_version.txt'), 'w') as f:
            f.write('# Total downloads by application and version, '
                    'since October 2011\n')
            for line in sorted(lst):
                f.write(line + "\n")

    # Calculate and write stats for repo types...
    logging.info("Processing repo types...")
    repotypes = Counter()
    for app in metaapps:
        rtype = app.RepoType or 'none'
        if rtype == 'srclib':
            rtype = common.getsrclibvcs(app.Repo)
        repotypes[rtype] += 1
    with open(os.path.join(statsdir, 'repotypes.txt'), 'w') as f:
        for rtype, count in most_common_stable(repotypes):
            f.write(rtype + ' ' + str(count) + '\n')

    # Calculate and write stats for update check modes...
    logging.info("Processing update check modes...")
    ucms = Counter()
    for app in metaapps:
        checkmode = app.UpdateCheckMode
        if checkmode.startswith('RepoManifest/'):
            checkmode = checkmode[:12]
        if checkmode.startswith('Tags '):
            checkmode = checkmode[:4]
        ucms[checkmode] += 1
    with open(os.path.join(statsdir, 'update_check_modes.txt'), 'w') as f:
        for checkmode, count in most_common_stable(ucms):
            f.write(checkmode + ' ' + str(count) + '\n')

    logging.info("Processing categories...")
    ctgs = Counter()
    for app in metaapps:
        for category in app.Categories:
            ctgs[category] += 1
    with open(os.path.join(statsdir, 'categories.txt'), 'w') as f:
        for category, count in most_common_stable(ctgs):
            f.write(category + ' ' + str(count) + '\n')

    logging.info("Processing antifeatures...")
    afs = Counter()
    for app in metaapps:
        if app.AntiFeatures is None:
            continue
        for antifeature in app.AntiFeatures:
            afs[antifeature] += 1
    with open(os.path.join(statsdir, 'antifeatures.txt'), 'w') as f:
        for antifeature, count in most_common_stable(afs):
            f.write(antifeature + ' ' + str(count) + '\n')

    # Calculate and write stats for licenses...
    logging.info("Processing licenses...")
    licenses = Counter()
    for app in metaapps:
        license = app.License
        licenses[license] += 1
    with open(os.path.join(statsdir, 'licenses.txt'), 'w') as f:
        for license, count in most_common_stable(licenses):
            f.write(license + ' ' + str(count) + '\n')

    # Write list of disabled apps...
    logging.info("Processing disabled apps...")
    disabled = [app.id for app in allmetaapps if app.Disabled]
    with open(os.path.join(statsdir, 'disabled_apps.txt'), 'w') as f:
        for appid in sorted(disabled):
            f.write(appid + '\n')

    # Write list of latest apps added to the repo...
    logging.info("Processing latest apps...")
    latest = knownapks.getlatest(10)
    with open(os.path.join(statsdir, 'latestapps.txt'), 'w') as f:
        for appid in latest:
            f.write(appid + '\n')

    if unknownapks:
        logging.info('\nUnknown apks:')
        for apk in unknownapks:
            logging.info(apk)

    logging.info("Finished.")
Example #41
def main():

    global config, options

    # Parse command line...
    parser = OptionParser(
        usage="Usage: %prog [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]")
    parser.add_option("-v",
                      "--verbose",
                      action="store_true",
                      default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-q",
                      "--quiet",
                      action="store_true",
                      default=False,
                      help="Restrict output to warnings and errors")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    # Read all app and srclib metadata
    allapps = metadata.read_metadata()
    apps = common.read_app_args(args, allapps, True)

    probcount = 0

    build_dir = 'build'
    if not os.path.isdir(build_dir):
        logging.info("Creating build directory")
        os.makedirs(build_dir)
    srclib_dir = os.path.join(build_dir, 'srclib')
    extlib_dir = os.path.join(build_dir, 'extlib')

    for appid, app in apps.iteritems():

        if app['Disabled']:
            logging.info("Skipping %s: disabled" % appid)
            continue
        if not app['builds']:
            logging.info("Skipping %s: no builds specified" % appid)
            continue

        logging.info("Processing " + appid)

        try:

            build_dir = 'build/' + appid

            # Set up vcs interface and make sure we have the latest code...
            vcs = common.getvcs(app['Repo Type'], app['Repo'], build_dir)

            for thisbuild in app['builds']:

                if thisbuild['disable']:
                    logging.info(
                        "...skipping version %s - %s" %
                        (thisbuild['version'],
                         thisbuild.get('disable', thisbuild['commit'][1:])))
                else:
                    logging.info("...scanning version " + thisbuild['version'])

                    # Prepare the source code...
                    root_dir, _ = common.prepare_source(
                        vcs, app, thisbuild, build_dir, srclib_dir, extlib_dir,
                        False)

                    # Do the scan...
                    count = common.scan_source(build_dir, root_dir, thisbuild)
                    if count > 0:
                        logging.warn('Scanner found %d problems in %s (%s)' %
                                     (count, appid, thisbuild['vercode']))
                        probcount += count

        except BuildException as be:
            logging.warn("Could not scan app %s due to BuildException: %s" %
                         (appid, be))
            probcount += 1
        except VCSException as vcse:
            logging.warn("VCS error while scanning app %s: %s" % (appid, vcse))
            probcount += 1
        except Exception:
            logging.warn("Could not scan app %s due to unknown error: %s" %
                         (appid, traceback.format_exc()))
            probcount += 1

    logging.info("Finished:")
    print "%d app(s) with problems" % probcount
Example #42
def updater():
    """main() - parse args, make Zenodo updater, execute, catch errors"""

    parser = argparse.ArgumentParser()

    # zenodo / administrative matters
    parser.add_argument("-z", "--zenodo_id", help="zenodo upload key")
    parser.add_argument("-s",
                        "--sandbox",
                        help="use sandbox mode",
                        action="store_true")

    # upload metadata - title, authors, keywords, description, metafile
    parser.add_argument("-m", "--metadata", help="json metadata file")
    parser.add_argument("-T", "--title", help="upload title")
    parser.add_argument("-C",
                        "--creator",
                        help="creator name e.g. Public, Joe Q.",
                        action="append")
    parser.add_argument("-A",
                        "--affiliation",
                        help="creator affiliation",
                        action="append")
    parser.add_argument("-K",
                        "--keyword",
                        help="keyword to associate",
                        action="append")
    parser.add_argument("-D", "--description", help="description")
    args = parser.parse_args()

    # validate metadata - allow file read and update from command line
    # (with that priority)
    if args.metadata:
        metadata = read_metadata(args.metadata)
    else:
        metadata = {}

    if "directory" in metadata:
        del metadata["directory"]

    if "files" in metadata:
        del metadata["files"]

    if "archive" in metadata:
        del metadata["archive"]

    if not args.zenodo_id:
        args.zenodo_id = get_access_token(sandbox=args.sandbox)

    cl_metadata = make_metadata(args.title, args.description, args.creator,
                                args.affiliation, args.keyword)

    metadata.update(cl_metadata)

    # explain what we are going to do
    print("ID: %s" % args.zenodo_id)

    import pprint

    pprint.pprint(metadata)

    # make and act on
    zenodo_updater = ZenodoUpdater(metadata, args.zenodo_id, args.sandbox)
    zenodo_updater.update()
    print("Update complete for deposition %s" %
          str(zenodo_updater.get_deposition()))
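
make_metadata is defined elsewhere; judging from the call above, it folds the command-line fields into a Zenodo-style metadata dict, skipping anything left unset. A minimal sketch under that assumption (the exact key names Zenodo expects are not shown in this snippet):

def make_metadata(title, description, creators, affiliations, keywords):
    # Fold command-line values into a metadata dict, skipping unset ones.
    metadata = {}
    if title:
        metadata["title"] = title
    if description:
        metadata["description"] = description
    if creators:
        affiliations = affiliations or [None] * len(creators)
        metadata["creators"] = [
            {"name": name, "affiliation": aff} if aff else {"name": name}
            for name, aff in zip(creators, affiliations)
        ]
    if keywords:
        metadata["keywords"] = keywords
    return metadata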
Example #43
def main():

    global options, config

    # Parse command line...
    parser = OptionParser()
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-q", "--quiet", action="store_true", default=False,
                      help="Restrict output to warnings and errors")
    parser.add_option("-d", "--download", action="store_true", default=False,
                      help="Download logs we don't have")
    parser.add_option("--recalc", action="store_true", default=False,
                      help="Recalculate aggregate stats - use when changes "
                      "have been made that would invalidate old cached data.")
    parser.add_option("--nologs", action="store_true", default=False,
                      help="Don't do anything logs-related")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    if not config['update_stats']:
        logging.info("Stats are disabled - check your configuration")
        sys.exit(1)

    # Get all metadata-defined apps...
    metaapps = [a for a in metadata.read_metadata().itervalues() if not a['Disabled']]

    statsdir = 'stats'
    logsdir = os.path.join(statsdir, 'logs')
    datadir = os.path.join(statsdir, 'data')
    if not os.path.exists(statsdir):
        os.mkdir(statsdir)
    if not os.path.exists(logsdir):
        os.mkdir(logsdir)
    if not os.path.exists(datadir):
        os.mkdir(datadir)

    if options.download:
        # Get any access logs we don't have...
        ssh = None
        ftp = None
        try:
            logging.info('Retrieving logs')
            ssh = paramiko.SSHClient()
            ssh.load_system_host_keys()
            ssh.connect('f-droid.org', username='******', timeout=10,
                        key_filename=config['webserver_keyfile'])
            ftp = ssh.open_sftp()
            ftp.get_channel().settimeout(60)
            logging.info("...connected")

            ftp.chdir('logs')
            files = ftp.listdir()
            for f in files:
                if f.startswith('access-') and f.endswith('.log.gz'):

                    destpath = os.path.join(logsdir, f)
                    destsize = ftp.stat(f).st_size
                    if (not os.path.exists(destpath) or
                            os.path.getsize(destpath) != destsize):
                        logging.debug("...retrieving " + f)
                        ftp.get(f, destpath)
        except Exception:
            traceback.print_exc()
            sys.exit(1)
        finally:
            # Disconnect
            if ftp is not None:
                ftp.close()
            if ssh is not None:
                ssh.close()

    knownapks = common.KnownApks()
    unknownapks = []

    if not options.nologs:
        # Process logs
        logging.info('Processing logs...')
        appscount = Counter()
        appsvercount = Counter()
        logexpr = r'(?P<ip>[.:0-9a-fA-F]+) - - \[(?P<time>.*?)\] ' + \
            r'"GET (?P<uri>.*?) HTTP/1.\d" (?P<statuscode>\d+) ' + \
            r'\d+ "(?P<referral>.*?)" "(?P<useragent>.*?)"'
        logsearch = re.compile(logexpr).search
        for logfile in glob.glob(os.path.join(logsdir, 'access-*.log.gz')):
            logging.debug('...' + logfile)

            # Get the date for this log - e.g. 2012-02-28
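            # ('access-' is 7 characters and '.log.gz' is 7, hence the [7:-7] slice below)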
            thisdate = os.path.basename(logfile)[7:-7]

            agg_path = os.path.join(datadir, thisdate + '.json')
            if not options.recalc and os.path.exists(agg_path):
                # Use previously calculated aggregate data
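                # (json stores the Counters as plain dicts, which is fine:
                # the totals loop below only indexes them by key)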
                with open(agg_path, 'r') as f:
                    today = json.load(f)

            else:
                # Calculate from logs...

                today = {
                    'apps': Counter(),
                    'appsver': Counter(),
                    'unknown': []
                    }

                p = subprocess.Popen(["zcat", logfile], stdout=subprocess.PIPE)
                matches = (logsearch(line) for line in p.stdout)
                for match in matches:
                    if not match:
                        continue
                    if match.group('statuscode') != '200':
                        continue
                    if match.group('ip') in config['stats_ignore']:
                        continue
                    uri = match.group('uri')
                    if not uri.endswith('.apk'):
                        continue
                    _, apkname = os.path.split(uri)
                    app = knownapks.getapp(apkname)
                    if app:
                        appid, _ = app
                        today['apps'][appid] += 1
                        # Strip the '.apk' from apkname
                        appver = apkname[:-4]
                        today['appsver'][appver] += 1
                    else:
                        if apkname not in today['unknown']:
                            today['unknown'].append(apkname)

                # Save calculated aggregate data for today to cache
                with open(agg_path, 'w') as f:
                    json.dump(today, f)

            # Add today's stats (whether cached or recalculated) to the total
            for appid in today['apps']:
                appscount[appid] += today['apps'][appid]
            for appid in today['appsver']:
                appsvercount[appid] += today['appsver'][appid]
            for uk in today['unknown']:
                if uk not in unknownapks:
                    unknownapks.append(uk)

        # Calculate and write stats for total downloads...
        lst = []
        alldownloads = 0
        for appid in appscount:
            count = appscount[appid]
            lst.append(appid + " " + str(count))
            if config['stats_to_carbon']:
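                # carbon_send (defined elsewhere) presumably pushes one metric
                # per app to a Carbon/Graphite backend; dots are replaced
                # because Carbon treats '.' as its metric path separator.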
                carbon_send('fdroid.download.' + appid.replace('.', '_'),
                            count)
            alldownloads += count
        lst.append("ALL " + str(alldownloads))
        with open('stats/total_downloads_app.txt', 'w') as f:
            f.write('# Total downloads by application, since October 2011\n')
            for line in sorted(lst):
                f.write(line + '\n')

        lst = []
        for appver, count in appsvercount.items():
            lst.append(appver + " " + str(count))
        with open('stats/total_downloads_app_version.txt', 'w') as f:
            f.write('# Total downloads by application and version, '
                    'since October 2011\n')
            for line in sorted(lst):
                f.write(line + "\n")

    # Calculate and write stats for repo types...
    logging.info("Processing repo types...")
    repotypes = Counter()
    for app in metaapps:
        rtype = app['Repo Type'] or 'none'
        if rtype == 'srclib':
            rtype = common.getsrclibvcs(app['Repo'])
        repotypes[rtype] += 1
    with open('stats/repotypes.txt', 'w') as f:
        for rtype, count in repotypes.items():
            f.write(rtype + ' ' + str(count) + '\n')

    # Calculate and write stats for update check modes...
    logging.info("Processing update check modes...")
    ucms = Counter()
    for app in metaapps:
        checkmode = app['Update Check Mode']
        if checkmode.startswith('RepoManifest/'):
            checkmode = checkmode[:12]
        if checkmode.startswith('Tags '):
            checkmode = checkmode[:4]
        ucms[checkmode] += 1
    with open('stats/update_check_modes.txt', 'w') as f:
        for checkmode, count in ucms.items():
            f.write(checkmode + ' ' + str(count) + '\n')

    logging.info("Processing categories...")
    ctgs = Counter()
    for app in metaapps:
        for category in app['Categories']:
            ctgs[category] += 1
    with open('stats/categories.txt', 'w') as f:
        for category, count in ctgs.items():
            f.write(category + ' ' + str(count) + '\n')

    logging.info("Processing antifeatures...")
    afs = Counter()
    for app in metaapps:
        if app['AntiFeatures'] is None:
            continue
        antifeatures = [a.strip() for a in app['AntiFeatures'].split(',')]
        for antifeature in antifeatures:
            afs[antifeature] += 1
    with open('stats/antifeatures.txt', 'w') as f:
        for antifeature, count in afs.items():
            f.write(antifeature + ' ' + str(count) + '\n')

    # Calculate and write stats for licenses...
    logging.info("Processing licenses...")
    licenses = Counter()
    for app in metaapps:
        license = app['License']
        licenses[license] += 1
    with open('stats/licenses.txt', 'w') as f:
        for license, count in licenses.items():
            f.write(license + ' ' + str(count) + '\n')

    # Write list of latest apps added to the repo...
    logging.info("Processing latest apps...")
    latest = knownapks.getlatest(10)
    with open('stats/latestapps.txt', 'w') as f:
        for appid in latest:
            f.write(appid + '\n')

    if unknownapks:
        logging.info('\nUnknown apks:')
        for apk in unknownapks:
            logging.info(apk)

    logging.info("Finished.")
Example #44
def main():

    global options, config

    options, args = parse_commandline()
    if not args and not options.all:
        raise OptionError(
            "If you really want to build all the apps, use --all", "all")

    config = common.read_config(options)

    if config['build_server_always']:
        options.server = True
    if options.resetserver and not options.server:
        raise OptionError(
            "Using --resetserver without --server makes no sense",
            "resetserver")

    log_dir = 'logs'
    if not os.path.isdir(log_dir):
        logging.info("Creating log directory")
        os.makedirs(log_dir)

    tmp_dir = 'tmp'
    if not os.path.isdir(tmp_dir):
        logging.info("Creating temporary directory")
        os.makedirs(tmp_dir)

    if options.test:
        output_dir = tmp_dir
    else:
        output_dir = 'unsigned'
        if not os.path.isdir(output_dir):
            logging.info("Creating output directory")
            os.makedirs(output_dir)

    if config['archive_older'] != 0:
        also_check_dir = 'archive'
    else:
        also_check_dir = None

    repo_dir = 'repo'

    build_dir = 'build'
    if not os.path.isdir(build_dir):
        logging.info("Creating build directory")
        os.makedirs(build_dir)
    srclib_dir = os.path.join(build_dir, 'srclib')
    extlib_dir = os.path.join(build_dir, 'extlib')

    # Read all app and srclib metadata
    allapps = metadata.read_metadata(xref=not options.onserver)

    apps = common.read_app_args(args, allapps, True)
    for appid, app in apps.items():
        if ((app['Disabled'] and not options.force)
                or not app['Repo Type'] or not app['builds']):
            del apps[appid]

    if not apps:
        raise FDroidException("No apps to process.")

    if options.latest:
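        # Keep only the newest non-disabled build for each app; builds are
        # listed oldest-first, hence the reversed() walk.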
        for app in apps.itervalues():
            for build in reversed(app['builds']):
                if build['disable'] and not options.force:
                    continue
                app['builds'] = [build]
                break

    if options.wiki:
        import mwclient
        site = mwclient.Site((config['wiki_protocol'], config['wiki_server']),
                             path=config['wiki_path'])
        site.login(config['wiki_user'], config['wiki_password'])

    # Build applications...
    failed_apps = {}
    build_succeeded = []
    for appid, app in apps.iteritems():

        first = True

        for thisbuild in app['builds']:
            wikilog = None
            try:

                # For the first build of a particular app, we need to set up
                # the source repo. We can reuse it on subsequent builds, if
                # there are any.
                if first:
                    if app['Repo Type'] == 'srclib':
                        build_dir = os.path.join('build', 'srclib',
                                                 app['Repo'])
                    else:
                        build_dir = os.path.join('build', appid)

                    # Set up vcs interface and make sure we have the latest code...
                    logging.debug("Getting {0} vcs interface for {1}".format(
                        app['Repo Type'], app['Repo']))
                    vcs = common.getvcs(app['Repo Type'], app['Repo'],
                                        build_dir)

                    first = False

                logging.debug("Checking " + thisbuild['version'])
                if trybuild(app, thisbuild, build_dir, output_dir,
                            also_check_dir, srclib_dir, extlib_dir, tmp_dir,
                            repo_dir, vcs, options.test, options.server,
                            options.force, options.onserver):
                    build_succeeded.append(app)
                    wikilog = "Build succeeded"
            except BuildException as be:
                with open(os.path.join(log_dir, appid + '.log'), 'a+') as logfile:
                    logfile.write(str(be))
                print("Could not build app %s due to BuildException: %s" %
                      (appid, be))
                if options.stop:
                    sys.exit(1)
                failed_apps[appid] = be
                wikilog = be.get_wikitext()
            except VCSException as vcse:
                reason = str(vcse).split(
                    '\n', 1)[0] if options.verbose else str(vcse)
                logging.error("VCS error while building app %s: %s" %
                              (appid, reason))
                if options.stop:
                    sys.exit(1)
                failed_apps[appid] = vcse
                wikilog = str(vcse)
            except Exception as e:
                logging.error(
                    "Could not build app %s due to unknown error: %s" %
                    (appid, traceback.format_exc()))
                if options.stop:
                    sys.exit(1)
                failed_apps[appid] = e
                wikilog = str(e)

            if options.wiki and wikilog:
                try:
                    # Write a page with the last build log for this version code
                    lastbuildpage = appid + '/lastbuild_' + thisbuild['vercode']
                    newpage = site.Pages[lastbuildpage]
                    txt = "Build completed at " + time.strftime(
                        "%Y-%m-%d %H:%M:%SZ", time.gmtime()) + "\n\n" + wikilog
                    newpage.save(txt, summary='Build log')
                    # Redirect from /lastbuild to the most recent build log
                    newpage = site.Pages[appid + '/lastbuild']
                    newpage.save('#REDIRECT [[' + lastbuildpage + ']]',
                                 summary='Update redirect')
                except Exception:
                    logging.error(
                        "Error while attempting to publish build log")

    for app in build_succeeded:
        logging.info("success: %s" % (app['id']))

    if not options.verbose:
        for fa in failed_apps:
            logging.info("Build for app %s failed:\n%s" %
                         (fa, failed_apps[fa]))

    logging.info("Finished.")
    if len(build_succeeded) > 0:
        logging.info(str(len(build_succeeded)) + ' builds succeeded')
    if len(failed_apps) > 0:
        logging.info(str(len(failed_apps)) + ' builds failed')

    sys.exit(0)
Example #45
def main():

    global config, options

    # Parse command line...
    parser = ArgumentParser(usage="%(prog)s [options] "
                            "[APPID[:VERCODE] [APPID[:VERCODE] ...]]")
    common.setup_global_opts(parser)
    parser.add_argument("appid", nargs='*', help="app-id with optional versioncode in the form APPID[:VERCODE]")
    options = parser.parse_args()

    config = common.read_config(options)

    log_dir = 'logs'
    if not os.path.isdir(log_dir):
        logging.info("Creating log directory")
        os.makedirs(log_dir)

    tmp_dir = 'tmp'
    if not os.path.isdir(tmp_dir):
        logging.info("Creating temporary directory")
        os.makedirs(tmp_dir)

    output_dir = 'repo'
    if not os.path.isdir(output_dir):
        logging.info("Creating output directory")
        os.makedirs(output_dir)

    unsigned_dir = 'unsigned'
    if not os.path.isdir(unsigned_dir):
        logging.warning("No unsigned directory - nothing to do")
        sys.exit(1)

    for f in [config['keystorepassfile'],
              config['keystore'],
              config['keypassfile']]:
        if not os.path.exists(f):
            logging.error("Config error - missing '{0}'".format(f))
            sys.exit(1)

    # It was suggested at
    #    https://dev.guardianproject.info/projects/bazaar/wiki/FDroid_Audit
    # that a package could be crafted, such that it would use the same signing
    # key as an existing app. While it may be theoretically possible for such a
    # colliding package ID to be generated, it seems virtually impossible that
    # the colliding ID would be something that would be a) a valid package ID,
    # and b) a sane-looking ID that would make its way into the repo.
    # Nonetheless, to be sure, before publishing we check that there are no
    # collisions, and refuse to do any publishing if that's the case...
    allapps = metadata.read_metadata()
    vercodes = common.read_pkg_args(options.appid, True)
    allaliases = []
    for appid in allapps:
        m = md5.new()
        m.update(appid)
        keyalias = m.hexdigest()[:8]
        if keyalias in allaliases:
            logging.error("There is a keyalias collision - publishing halted")
            sys.exit(1)
        allaliases.append(keyalias)
    logging.info("{0} apps, {0} key aliases".format(len(allapps),
                                                    len(allaliases)))

    # Process any apks that are waiting to be signed...
    for apkfile in sorted(glob.glob(os.path.join(unsigned_dir, '*.apk'))):

        appid, vercode = common.apknameinfo(apkfile)
        apkfilename = os.path.basename(apkfile)
        if vercodes and appid not in vercodes:
            continue
        if appid in vercodes and vercodes[appid]:
            if vercode not in vercodes[appid]:
                continue
        logging.info("Processing " + apkfile)

        # There ought to be valid metadata for this app, otherwise why are we
        # trying to publish it?
        if appid not in allapps:
            logging.error("Unexpected {0} found in unsigned directory"
                          .format(apkfilename))
            sys.exit(1)
        app = allapps[appid]

        if app.Binaries is not None:

            # It's an app where we build from source, and verify the apk
            # contents against a developer's binary, and then publish their
            # version if everything checks out.
            # The binary should already have been retrieved during the build
            # process.
            srcapk = apkfile + ".binary"

            # Compare our unsigned one with the downloaded one...
            compare_result = common.verify_apks(srcapk, apkfile, tmp_dir)
            if compare_result:
                logging.error("...verification failed - publish skipped : "
                              + compare_result)
                continue

            # Success! So move the downloaded file to the repo, and remove
            # our built version.
            shutil.move(srcapk, os.path.join(output_dir, apkfilename))
            os.remove(apkfile)

        else:

            # It's a 'normal' app, i.e. we sign and publish it...

            # Figure out the key alias name we'll use. Only the first 8
            # characters are significant, so we'll use the first 8 from
            # the MD5 of the app's ID and hope there are no collisions.
            # If a collision does occur later, we're going to have to
            # come up with a new algorithm, AND rename all existing keys
            # in the keystore!
            if appid in config['keyaliases']:
                # For this particular app, the key alias is overridden...
                keyalias = config['keyaliases'][appid]
                if keyalias.startswith('@'):
                    m = md5.new()
                    m.update(keyalias[1:])
                    keyalias = m.hexdigest()[:8]
            else:
                m = md5.new()
                m.update(appid)
                keyalias = m.hexdigest()[:8]
            logging.info("Key alias: " + keyalias)

            # See if we already have a key for this application, and
            # if not generate one...
            p = FDroidPopen(['keytool', '-list',
                             '-alias', keyalias, '-keystore', config['keystore'],
                             '-storepass:file', config['keystorepassfile']])
            if p.returncode != 0:
                logging.info("Key does not exist - generating...")
                p = FDroidPopen(['keytool', '-genkey',
                                 '-keystore', config['keystore'],
                                 '-alias', keyalias,
                                 '-keyalg', 'RSA', '-keysize', '2048',
                                 '-validity', '10000',
                                 '-storepass:file', config['keystorepassfile'],
                                 '-keypass:file', config['keypassfile'],
                                 '-dname', config['keydname']])
                # TODO keypass should be sent via stdin
                if p.returncode != 0:
                    raise BuildException("Failed to generate key")

            # Sign the application...
            p = FDroidPopen(['jarsigner', '-keystore', config['keystore'],
                             '-storepass:file', config['keystorepassfile'],
                             '-keypass:file', config['keypassfile'], '-sigalg',
                             'SHA1withRSA', '-digestalg', 'SHA1',
                             apkfile, keyalias])
            # TODO keypass should be sent via stdin
            if p.returncode != 0:
                raise BuildException("Failed to sign application")

            # Zipalign it...
            p = SdkToolsPopen(['zipalign', '-v', '4', apkfile,
                               os.path.join(output_dir, apkfilename)])
            if p.returncode != 0:
                raise BuildException("Failed to align application")
            os.remove(apkfile)

        # Move the source tarball into the output directory...
        tarfilename = apkfilename[:-4] + '_src.tar.gz'
        tarfile = os.path.join(unsigned_dir, tarfilename)
        if os.path.exists(tarfile):
            shutil.move(tarfile, os.path.join(output_dir, tarfilename))

        logging.info('Published ' + apkfilename)
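
For reference, the alias derivation used above (the first 8 hex characters of the MD5 of the app id, or of the name after '@' for overridden aliases) is easy to reproduce standalone. A minimal sketch with a hypothetical app id, using hashlib rather than the deprecated md5 module:

import hashlib

appid = "org.example.app"  # hypothetical
keyalias = hashlib.md5(appid).hexdigest()[:8]  # on Python 3, hash appid.encode()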
Example #46
def main():

    global config, options, curid, count
    curid = None

    count = Counter()

    def warn(message):
        global curid, count
        if curid:
            print "%s:" % curid
            curid = None
            count["app"] += 1
        print "    %s" % message
        count["warn"] += 1

    # Parse command line...
    parser = OptionParser(usage="Usage: %prog [options] [APPID [APPID ...]]")
    parser.add_option(
        "-v", "--verbose", action="store_true", default=False, help="Spew out even more information than normal"
    )
    parser.add_option(
        "-q", "--quiet", action="store_true", default=False, help="Restrict output to warnings and errors"
    )
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    # Get all apps...
    allapps = metadata.read_metadata(xref=False)
    apps = common.read_app_args(args, allapps, False)

    filling_ucms = re.compile("^(Tags.*|RepoManifest.*)")

    for appid, app in apps.iteritems():
        if app["Disabled"]:
            continue

        curid = appid
        count["app_total"] += 1

        # enabled_builds = 0
        lowest_vercode = -1
        curbuild = None
        for build in app["builds"]:
            if not build["disable"]:
                # enabled_builds += 1
                vercode = int(build["vercode"])
                if lowest_vercode == -1 or vercode < lowest_vercode:
                    lowest_vercode = vercode
            if not curbuild or int(build["vercode"]) > int(curbuild["vercode"]):
                curbuild = build

        # Incorrect UCM
        if (
            curbuild
            and curbuild["commit"]
            and app["Update Check Mode"] == "RepoManifest"
            and not curbuild["commit"].startswith("unknown")
            and curbuild["vercode"] == app["Current Version Code"]
            and not curbuild["forcevercode"]
            and any(s in curbuild["commit"] for s in ".,_-/")
        ):
            warn(
                "Last used commit '%s' looks like a tag, but Update Check Mode is '%s'"
                % (curbuild["commit"], app["Update Check Mode"])
            )

        # Summary size limit
        summ_chars = len(app["Summary"])
        if summ_chars > config["char_limits"]["Summary"]:
            warn("Summary of length %s is over the %i char limit" % (summ_chars, config["char_limits"]["Summary"]))

        # Redundant info
        if app["Web Site"] and app["Source Code"]:
            if app["Web Site"].lower() == app["Source Code"].lower():
                warn("Website '%s' is just the app's source code link" % app["Web Site"])

        if filling_ucms.match(app["Update Check Mode"]):
            if all(
                app[f] == metadata.app_defaults[f] for f in ["Auto Name", "Current Version", "Current Version Code"]
            ):
                warn("UCM is set but it looks like checkupdates hasn't been run yet")

        if app["Update Check Name"] == appid:
            warn("Update Check Name is set to the known app id - it can be removed")

        cvc = int(app["Current Version Code"])
        if cvc > 0 and cvc < lowest_vercode:
            warn("Current Version Code is lower than any enabled build")

        # Missing or incorrect categories
        if not app["Categories"]:
            warn("Categories are not set")
        for categ in app["Categories"]:
            if categ not in categories:
                warn("Category '%s' is not valid" % categ)

        if app["Name"] and app["Name"] == app["Auto Name"]:
            warn("Name '%s' is just the auto name" % app["Name"])

        name = app["Name"] or app["Auto Name"]
        if app["Summary"] and name:
            if app["Summary"].lower() == name.lower():
                warn("Summary '%s' is just the app's name" % app["Summary"])

        desc = app["Description"]
        if app["Summary"] and desc and len(desc) == 1:
            if app["Summary"].lower() == desc[0].lower():
                warn("Description '%s' is just the app's summary" % app["Summary"])

        # Description size limit
        desc_charcount = sum(len(l) for l in desc)
        if desc_charcount > config["char_limits"]["Description"]:
            warn(
                "Description of length %s is over the %i char limit"
                % (desc_charcount, config["char_limits"]["Description"])
            )

        if not desc[0] or not desc[-1] or any(not desc[l - 1] and not desc[l] for l in range(1, len(desc))):
            warn("Description has an extra empty line")

        # Check for lists using the wrong characters
        validchars = ["*", "#"]
        lchar = ""
        lcount = 0
        for l in app["Description"]:
            if len(l) < 1:
                continue

            for um in desc_url.finditer(l):
                url = um.group(1)
                for m, r in http_warnings:
                    if m.match(url):
                        warn("URL '%s' in Description: %s" % (url, r))

            c = l.decode("utf-8")[0]
            if c == lchar:
                lcount += 1
                if lcount > 3 and lchar not in validchars:
                    warn("Description has a list (%s) but it isn't bulleted (*) nor numbered (#)" % lchar)
                    break
            else:
                lchar = c
                lcount = 1

        # Regex checks in all kinds of fields
        for f in regex_warnings:
            for m, r in regex_warnings[f]:
                v = app[f]
                if v is None:
                    continue
                if isinstance(v, str):
                    if m.match(v):
                        warn("%s '%s': %s" % (f, v, r))
                elif isinstance(v, list):
                    for l in v:
                        if m.match(l):
                            warn("%s at line '%s': %s" % (f, l, r))

        # Build warnings
        for build in app["builds"]:
            if build["disable"]:
                continue
            for s in ["master", "origin", "HEAD", "default", "trunk"]:
                if build["commit"] and build["commit"].startswith(s):
                    warn("Branch '%s' used as commit in build '%s'" % (s, build["version"]))
                for srclib in build["srclibs"]:
                    ref = srclib.split("@")[1].split("/")[0]
                    if ref.startswith(s):
                        warn("Branch '%s' used as commit in srclib '%s'" % (s, srclib))

        if not curid:
            print

    logging.info(
        "Found a total of %i warnings in %i apps out of %i total." % (count["warn"], count["app"], count["app_total"])
    )

    sys.exit(1 if count["warn"] > 0 else 0)
Example #47
def main():

    global config, options

    # Parse command line...
    parser = OptionParser()
    parser.add_option("-c",
                      "--create-metadata",
                      action="store_true",
                      default=False,
                      help="Create skeleton metadata files that are missing")
    parser.add_option("--delete-unknown",
                      action="store_true",
                      default=False,
                      help="Delete APKs without metadata from the repo")
    parser.add_option("-v",
                      "--verbose",
                      action="store_true",
                      default=False,
                      help="Spew out even more information than normal")
    parser.add_option("-q",
                      "--quiet",
                      action="store_true",
                      default=False,
                      help="Restrict output to warnings and errors")
    parser.add_option("-b",
                      "--buildreport",
                      action="store_true",
                      default=False,
                      help="Report on build data status")
    parser.add_option(
        "-i",
        "--interactive",
        default=False,
        action="store_true",
        help="Interactively ask about things that need updating.")
    parser.add_option(
        "-I",
        "--icons",
        action="store_true",
        default=False,
        help="Resize all the icons exceeding the max pixel size and exit")
    parser.add_option(
        "-e",
        "--editor",
        default="/etc/alternatives/editor",
        help="Specify editor to use in interactive mode. Default " +
        "is /etc/alternatives/editor")
    parser.add_option("-w",
                      "--wiki",
                      default=False,
                      action="store_true",
                      help="Update the wiki")
    parser.add_option("",
                      "--pretty",
                      action="store_true",
                      default=False,
                      help="Produce human-readable index.xml")
    parser.add_option(
        "--clean",
        action="store_true",
        default=False,
        help="Clean update - don't uses caches, reprocess all apks")
    (options, args) = parser.parse_args()

    config = common.read_config(options)

    repodirs = ['repo']
    if config['archive_older'] != 0:
        repodirs.append('archive')
        if not os.path.exists('archive'):
            os.mkdir('archive')

    if options.icons:
        resize_all_icons(repodirs)
        sys.exit(0)

    # check that icons exist now, rather than fail at the end of `fdroid update`
    for k in ['repo_icon', 'archive_icon']:
        if k in config:
            if not os.path.exists(config[k]):
                logging.critical(k + ' "' + config[k] +
                                 '" does not exist! Correct it in config.py.')
                sys.exit(1)

    # Get all apps...
    apps = metadata.read_metadata()

    # Generate a list of categories...
    categories = set()
    for app in apps.itervalues():
        categories.update(app['Categories'])

    # Read known apks data (will be updated and written back when we've finished)
    knownapks = common.KnownApks()

    # Gather information about all the apk files in the repo directory, using
    # cached data if possible.
    apkcachefile = os.path.join('tmp', 'apkcache')
    if not options.clean and os.path.exists(apkcachefile):
        with open(apkcachefile, 'rb') as cf:
            apkcache = pickle.load(cf)
    else:
        apkcache = {}
    cachechanged = False

    delete_disabled_builds(apps, apkcache, repodirs)

    # Scan all apks in the main repo
    apks, cc = scan_apks(apps, apkcache, repodirs[0], knownapks)
    if cc:
        cachechanged = True

    # Generate warnings for apk's with no metadata (or create skeleton
    # metadata files, if requested on the command line)
    newmetadata = False
    for apk in apks:
        if apk['id'] not in apps:
            if options.create_metadata:
                if 'name' not in apk:
                    logging.error(apk['id'] +
                                  ' does not have a name! Skipping...')
                    continue
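                # Write a minimal .txt metadata stub; in this format the
                # Description block is terminated by a line containing only '.'.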
                with open(os.path.join('metadata', apk['id'] + '.txt'), 'w') as f:
                    f.write("License:Unknown\n")
                    f.write("Web Site:\n")
                    f.write("Source Code:\n")
                    f.write("Issue Tracker:\n")
                    f.write("Summary:" + apk['name'] + "\n")
                    f.write("Description:\n")
                    f.write(apk['name'] + "\n")
                    f.write(".\n")
                logging.info("Generated skeleton metadata for " + apk['id'])
                newmetadata = True
            else:
                msg = apk['apkname'] + " (" + apk['id'] + ") has no metadata!"
                if options.delete_unknown:
                    logging.warn(msg + "\n\tdeleting: repo/" + apk['apkname'])
                    rmf = os.path.join(repodirs[0], apk['apkname'])
                    if not os.path.exists(rmf):
                        logging.error(
                            "Could not find {0} to remove it".format(rmf))
                    else:
                        os.remove(rmf)
                else:
                    logging.warn(msg +
                                 "\n\tUse `fdroid update -c` to create it.")

    # update the metadata with the newly created ones included
    if newmetadata:
        apps = metadata.read_metadata()

    # Scan the archive repo for apks as well
    if len(repodirs) > 1:
        archapks, cc = scan_apks(apps, apkcache, repodirs[1], knownapks)
        if cc:
            cachechanged = True
    else:
        archapks = []

    # Some information from the apks needs to be applied up to the application
    # level. When doing this, we use the info from the most recent version's apk.
    # We deal with figuring out when the app was added and last updated at the
    # same time.
    for appid, app in apps.iteritems():
        bestver = 0
        added = None
        lastupdated = None
        for apk in apks + archapks:
            if apk['id'] == appid:
                if apk['versioncode'] > bestver:
                    bestver = apk['versioncode']
                    bestapk = apk

                if 'added' in apk:
                    if not added or apk['added'] < added:
                        added = apk['added']
                    if not lastupdated or apk['added'] > lastupdated:
                        lastupdated = apk['added']

        if added:
            app['added'] = added
        else:
            logging.warn("Don't know when " + appid + " was added")
        if lastupdated:
            app['lastupdated'] = lastupdated
        else:
            logging.warn("Don't know when " + appid + " was last updated")

        if bestver == 0:
            if app['Name'] is None:
                app['Name'] = appid
            app['icon'] = None
            logging.warn("Application " + appid + " has no packages")
        else:
            if app['Name'] is None:
                app['Name'] = bestapk['name']
            app['icon'] = bestapk['icon'] if 'icon' in bestapk else None

    # Sort the app list by name so the web site doesn't have to by default.
    # (We had to wait until we'd scanned the apks to do this, because the
    # name mostly comes from there!)
    sortedids = sorted(apps.iterkeys(),
                       key=lambda appid: apps[appid]['Name'].upper())

    if len(repodirs) > 1:
        archive_old_apks(apps, apks, archapks, repodirs[0], repodirs[1],
                         config['archive_older'])

    # Make the index for the main repo...
    make_index(apps, sortedids, apks, repodirs[0], False, categories)

    # If there's an archive repo, make the index for it. We already scanned it
    # earlier on.
    if len(repodirs) > 1:
        make_index(apps, sortedids, archapks, repodirs[1], True, categories)

    if config['update_stats']:

        # Update known apks info...
        knownapks.writeifchanged()

        # Generate latest apps data for widget
        if os.path.exists(os.path.join('stats', 'latestapps.txt')):
            data = ''
            with open(os.path.join('stats', 'latestapps.txt')) as f:
                for line in f:
                    appid = line.rstrip()
                    data += appid + "\t"
                    app = apps[appid]
                    data += app['Name'] + "\t"
                    if app['icon'] is not None:
                        data += app['icon'] + "\t"
                    data += app['License'] + "\n"
            with open(os.path.join(repodirs[0], 'latestapps.dat'), 'w') as f:
                f.write(data)

    if cachechanged:
        with open(apkcachefile, 'wb') as cf:
            pickle.dump(apkcache, cf)

    # Update the wiki...
    if options.wiki:
        update_wiki(apps, sortedids, apks + archapks)

    logging.info("Finished.")