Example #1
File: jpache.py  Project: lowks/staller
def main(argv=None):
    packages = [
        ( 'http://ant.apache.org/bindownload.cgi', 
          'apache-ant', 
        ),
        ( 'http://maven.apache.org/download.cgi', 
          'apache-maven',
        ),
        ( 'http://tomcat.apache.org/download-70.cgi', 
          'apache-tomcat',
        ),
    ]

    parser = argparse.ArgumentParser( )
    parser.add_argument('-p', '--prefix', required=True)
    parser.add_argument('-t', '--tempdir', required=False)
    parser.add_argument('-f', '--force', action='store_true', required=False)

    if argv is None:
        argv = parser.parse_args()

    if os.path.isfile(os.path.join(argv.prefix,'apache-ant','bin','ant')) and not argv.force:
        print "been done? use -f/--force to force rebuild"
        exit(0)

    keys = [ 
        'https://www.apache.org/dist/ant/KEYS', 
        'https://www.apache.org/dist/maven/KEYS', 
        'https://www.apache.org/dist/tomcat/tomcat-connectors/KEYS',
        'https://www.apache.org/dist/tomcat/tomcat-7/KEYS',
    ]

    if argv.tempdir:
        tempfile.tempdir = argv.tempdir

    tmp = tempfile.mkdtemp(prefix="apache_java_")
    key_import(keys, tmp)
    os.chdir(tmp)
    pp(tmp)

    for (url, package) in packages:
        archive = scraper(url, package, tmp)
        os.chdir(argv.prefix)
        print subprocess.check_output(['tar', 'zxf', archive])
        src_dir = archive[:-7] # strip off '.tar.gz'
        src_dir = os.path.basename(src_dir)
        if src_dir.endswith('-bin'):
            src_dir = src_dir[:-4]
        print src_dir
        try:
            os.symlink(src_dir, package)
        except OSError, e:
            if e.errno != 17 and not argv.force: #symlink already exists OK
                raise e
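The helpers key_import, scraper, and pp are defined elsewhere in the staller project and are not part of this listing (pp is presumably a pretty-print wrapper). As a minimal sketch, assuming key_import simply downloads each KEYS file and imports it into a throwaway GnuPG keyring inside the temp directory, it might look roughly like this; the project's real implementation may differ.

import os
import subprocess
import urllib2

def key_import(keys, tmp):
    # keep the throwaway keyring inside the temp dir instead of ~/.gnupg
    os.environ['GNUPGHOME'] = tmp
    for url in keys:
        # e.g. .../dist/ant/KEYS -> ant.KEYS (hypothetical naming scheme)
        keyfile = os.path.join(tmp, os.path.basename(os.path.dirname(url)) + '.KEYS')
        with open(keyfile, 'wb') as fh:
            fh.write(urllib2.urlopen(url).read())
        # gpg exits non-zero if the import fails, so check_call raises
        subprocess.check_call(['gpg', '--import', keyfile])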
Example #2
File: solr_it.py  Project: lowks/staller
def main(argv=None):
    parser = argparse.ArgumentParser( )
    parser.add_argument('-p', '--prefix', required=True)
    parser.add_argument('-v', '--version', required=True)
    parser.add_argument('-t', '--tempdir', required=False)
    parser.add_argument('-f', '--force', action='store_true', required=False)

    if argv is None:
        argv = parser.parse_args()

    if os.path.isfile(os.path.join(argv.prefix,'apache-solr','bin')) and not argv.force:
        print "been done? use -f/--force to force rebuild"
        exit(0)

    # https://archive.apache.org/dist/lucene/solr/4.7.0/solr-4.7.0.tgz

    sbase = ''.join(['https://archive.apache.org/dist/lucene/solr/', argv.version, '/'])
    skeys = ''.join([sbase, 'KEYS'])
    download_url = ''.join([sbase, 'solr-', argv.version, '.tgz'])
    md5_url = ''.join([download_url, '.md5'])
    pgp_url = ''.join([download_url, '.asc'])

    keys = [ skeys ]

    if argv.tempdir:
        tempfile.tempdir = argv.tempdir

    tmp = tempfile.mkdtemp(prefix="apache_solr_")
    key_import(keys, tmp)
    os.chdir(tmp)
    pp(tmp)

    archive = checked_archive(download_url, md5_url, pgp_url, tmp)
    pp(archive)
    os.chdir(argv.prefix)
    print subprocess.check_output(['tar', 'zxf', archive])
    src_dir = archive[:-4] # strip off '.tgz'
    src_dir = os.path.basename(src_dir)
    print src_dir
    try:
        os.symlink(src_dir, 'apache-solr')
    except OSError, e:
        if e.errno != 17 and not argv.force: #symlink already exists OK
            raise e
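Here the download-and-verify work is delegated to checked_archive, another project helper not shown in the listing. A rough sketch, under the assumption that it fetches the archive plus its .md5 and .asc companions, compares the MD5 digest, and verifies the detached PGP signature against the keys imported earlier:

import os
import hashlib
import subprocess
import urllib2

def checked_archive(download_url, md5_url, pgp_url, tmp):
    archive = os.path.join(tmp, os.path.basename(download_url))
    for url, path in ((download_url, archive),
                      (md5_url, archive + '.md5'),
                      (pgp_url, archive + '.asc')):
        with open(path, 'wb') as fh:
            fh.write(urllib2.urlopen(url).read())

    # the .md5 file holds the hex digest (possibly followed by the file name)
    expected = open(archive + '.md5').read().split()[0].lower()
    actual = hashlib.md5(open(archive, 'rb').read()).hexdigest()
    if expected != actual:
        raise RuntimeError('MD5 mismatch for %s' % archive)

    # verify the detached signature; gpg exits non-zero on failure
    subprocess.check_call(['gpg', '--verify', archive + '.asc', archive])
    return archive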
Example #3
def main(argv=None):
    # https://wiki.shibboleth.net/confluence/display/SHIB2/NativeSPLinuxSourceBuild
    packages = [
        ( 'https://shibboleth.net/downloads/log4shib/latest/', 
          'log4shib', 
          './configure --disable-static --disable-doxygen --prefix={0}',
        ),
        ( 'https://xerces.apache.org/xerces-c/download.cgi', 
          'xerces-c',
          './configure --prefix={0} --disable-netaccessor-curl --disable-transcoder-gnuiconv',
        ),
        ( 'https://santuario.apache.org/download.html', 
          'xml-security-c',
          './configure --without-xalan --disable-static --prefix={0} --with-xerces={0} --with-openssl=/usr',
        ),
        ( 'https://shibboleth.net/downloads/c++-opensaml/latest/', 
          'xmltooling',
          './configure --with-log4shib={0} --prefix={0} -C --with-boost={1}'
        ),
        ( 'https://shibboleth.net/downloads/c++-opensaml/latest/', 
          'opensaml',
          './configure --with-log4shib={0} --prefix={0} -C --with-boost={1}/include'
        ),
        ( 'https://shibboleth.net/downloads/service-provider/latest/', 
          'shibboleth-sp',
          './configure --with-log4shib={0} --enable-apache-22 --with-apxs2={1}/sbin/apxs --prefix={0} --with-openssl=/usr --with-boost={1}/include'
        ),
    ]
    parser = argparse.ArgumentParser( )
    parser.add_argument('-p', '--prefix', required=True)
    parser.add_argument('-o', '--other-prefix', required=True, help="boost headers")
    parser.add_argument('-t', '--tempdir', required=False)
    parser.add_argument('-f', '--force', action='store_true', required=False)

    if argv is None:
        argv = parser.parse_args()

    shibd_path = os.path.join(argv.prefix,'sbin','shibd')

    if os.path.isfile(shibd_path) and not argv.force:
        print "been done? use -f/--force to force rebuild"
        exit(0)

    keys = [ 
        'https://www.apache.org/dist/santuario/KEYS', 
        'https://www.apache.org/dist/xerces/c/KEYS', 
    ]

    if argv.tempdir:
        mkdir_p(argv.tempdir)
        tempfile.tempdir = argv.tempdir

    tmp = tempfile.mkdtemp(prefix="shib_builder_")
    key_import(keys, tmp)
    os.chdir(tmp)
    pp(tmp)

    os.environ['CFLAGS'] = os.environ['CPPFLAGS'] = "-I {0}/include".format(argv.prefix)
    os.environ['LDFLAGS'] = "-L{0}/lib".format(argv.prefix)
    resetldpath(argv.prefix)
    #resetldpath(argv.prefix, argv.other_prefix)

    for (url, package, configure) in packages:
        # scraper looks at the "latest download" web page, finds the newest .tar.gz,
        # verifies the MD5 checksum and the PGP signature, then downloads the
        # verified package to `tmp` and returns the path to the .tar.gz
        archive = scraper(url, package, tmp)
        os.chdir(tmp)
        print subprocess.check_output(['tar', 'zxf', archive])
        src_dir = archive[:-7] # strip off '.tar.gz'
        print src_dir
        os.chdir(src_dir)
        # --with-boost=/registry/pkg/include
        config_command = configure.format(argv.prefix, argv.other_prefix)
        print config_command
        subprocess.check_output(config_command.split())
        subprocess.check_output(['make'])
        subprocess.check_output(['make', 'install'])

    # test the shib command when we are done
    subprocess.check_output([shibd_path, '-t'])

    # save config.logs from source building tree before deleting?
    shutil.rmtree(tmp)
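mkdir_p and resetldpath are likewise project utilities that do not appear in the listing. Minimal sketches, assuming mkdir_p mirrors mkdir -p and resetldpath points LD_LIBRARY_PATH at the lib/ directory under each install prefix so the later ./configure runs in the loop can link against libraries built earlier:

import errno
import os

def mkdir_p(path):
    # behave like mkdir -p: create the tree, ignore "already exists"
    try:
        os.makedirs(path)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise

def resetldpath(*prefixes):
    # let the dynamic linker find libraries installed earlier in the loop
    os.environ['LD_LIBRARY_PATH'] = ':'.join(
        os.path.join(p, 'lib') for p in prefixes)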