Code example #1
File: repos.py Project: padrigali/ybd
def checkout(name, repo, ref, checkout):
    gitdir = os.path.join(app.config['gits'], get_repo_name(repo))
    if not os.path.exists(gitdir):
        mirror(name, repo)
    elif not mirror_has_ref(gitdir, ref):
        update_mirror(name, repo, gitdir)
    # checkout the required version of this from git
    with open(os.devnull, "w") as fnull:
        # We need to pass '--no-hardlinks' because right now there's nothing to
        # stop the build from overwriting the files in the .git directory
        # inside the sandbox. If they were hardlinks, it'd be possible for a
        # build to corrupt the repo cache. I think it would be faster if we
        # removed --no-hardlinks, though.
        if call(['git', 'clone', '--no-hardlinks', gitdir, checkout],
                stdout=fnull, stderr=fnull):
            app.exit(name, 'ERROR: git clone failed for', ref)

        with app.chdir(checkout):
            if call(['git', 'checkout', '--force', ref], stdout=fnull,
                    stderr=fnull):
                app.exit(name, 'ERROR: git checkout failed for', ref)

            app.log(name, 'Git checkout %s in %s' % (repo, checkout))
            app.log(name, 'Upstream version %s' % get_version(checkout, ref))

            if os.path.exists('.gitmodules'):
                checkout_submodules(name, ref)

    utils.set_mtime_recursively(checkout)
Code example #2
File: repos.py Project: gtristan/ybd
def mirror(name, repo):
    tempfile.tempdir = app.config['tmp']
    tmpdir = tempfile.mkdtemp()
    repo_url = get_repo_url(repo)
    try:
        tar_file = get_repo_name(repo_url) + '.tar'
        app.log(name, 'Try fetching tarball %s' % tar_file)
        # try tarball first
        with app.chdir(tmpdir), open(os.devnull, "w") as fnull:
            call(['wget', os.path.join(app.config['tar-url'], tar_file)])
            call(['tar', 'xf', tar_file], stderr=fnull)
            os.remove(tar_file)
            update_mirror(name, repo, tmpdir)
    except:
        app.log(name, 'Try git clone from', repo_url)
        with open(os.devnull, "w") as fnull:
            if call(['git', 'clone', '--mirror', '-n', repo_url, tmpdir]):
                app.exit(name, 'ERROR: failed to clone', repo)

    with app.chdir(tmpdir):
        if call(['git', 'rev-parse']):
            app.exit(name, 'ERROR: problem mirroring git repo at', tmpdir)

    gitdir = os.path.join(app.config['gits'], get_repo_name(repo))
    try:
        shutil.move(tmpdir, gitdir)
        app.log(name, 'Git repo is mirrored at', gitdir)
    except:
        pass
Code example #3
File: repos.py Project: rdale/ybd
def get_tree(this):
    ref = this["ref"]
    gitdir = os.path.join(app.config["gits"], get_repo_name(this["repo"]))
    if this["repo"].startswith("file://") or this["repo"].startswith("/"):
        gitdir = this["repo"].replace("file://", "")
        if not os.path.isdir(gitdir):
            app.exit(this, "ERROR: git repo not found:", this["repo"])

    if not os.path.exists(gitdir):
        try:
            url = app.config["tree-server"] + "repo=" + get_repo_url(this["repo"]) + "&ref=" + ref
            response = requests.get(url=url)
            tree = response.json()["tree"]
            return tree
        except:
            if app.config.get("tree-server"):
                app.log(this, "WARNING: no tree from tree-server for", ref)

        mirror(this["name"], this["repo"])

    with app.chdir(gitdir), open(os.devnull, "w") as fnull:
        if call(["git", "rev-parse", ref + "^{object}"], stdout=fnull, stderr=fnull):
            # can't resolve this ref. is it upstream?
            app.log(this, "Fetching from upstream to resolve %s" % ref)
            call(["git", "fetch", "origin"], stdout=fnull, stderr=fnull)

        try:
            tree = check_output(["git", "rev-parse", ref + "^{tree}"], universal_newlines=True)[0:-1]
            return tree

        except:
            # either we don't have a git dir, or ref is not unique
            # or ref does not exist
            app.exit(this, "ERROR: could not find tree for ref", (ref, gitdir))
Code example #4
File: concourse.py Project: leeming/ybd
def write_pipeline(defs, target):
    target = defs.get(target)
    config = {'run': {'path': 'ybd', 'args': []},
              'platform': 'linux',
              'image': 'docker:///devcurmudgeon/foo'}

    aggregate = []
    passed = []
    for it in target.get('contents', []) + target.get('build-depends', []):
        component = defs.get(it)
        if component.get('repo'):
            log('AGGREGATE', 'Adding aggregate for', component['name'])
            aggregate += [{'get': component['name']}]
        else:
            log('PASSED', 'Adding passed for', component['name'])
            aggregate += [{'get': component['name']}]
            passed += [component['name']]

    plan = [{'task': 'Build', 'config': config}, {'aggregate': aggregate}]
    job = {'name': target['name'], 'plan': plan, 'passed': passed}
    pipeline = {'resources': inputs(defs, target), 'jobs': [job]}

    output = './pipeline.yml'
    with open(output, 'w') as f:
        f.write(yaml.dump(pipeline, default_flow_style=False))

    exit('CONCOURSE', 'pipeline is at', output)
Code example #5
def mirror(name, repo):
    tempfile.tempdir = app.config['tmp']
    tmpdir = tempfile.mkdtemp()
    repo_url = get_repo_url(repo)
    try:
        tar_file = get_repo_name(repo_url) + '.tar'
        app.log(name, 'Try fetching tarball %s' % tar_file)
        # try tarball first
        with app.chdir(tmpdir), open(os.devnull, "w") as fnull:
            call(['wget', os.path.join(app.config['tar-url'], tar_file)])
            call(['tar', 'xf', tar_file], stderr=fnull)
            os.remove(tar_file)
            update_mirror(name, repo, tmpdir)
    except:
        app.log(name, 'Try git clone from', repo_url)
        with open(os.devnull, "w") as fnull:
            if call(['git', 'clone', '--mirror', '-n', repo_url, tmpdir]):
                app.exit(name, 'ERROR: failed to clone', repo)

    with app.chdir(tmpdir):
        if call(['git', 'rev-parse']):
            app.exit(name, 'ERROR: problem mirroring git repo at', tmpdir)

    gitdir = os.path.join(app.config['gits'], get_repo_name(repo))
    try:
        shutil.move(tmpdir, gitdir)
        app.log(name, 'Git repo is mirrored at', gitdir)
    except:
        pass
Code example #6
File: repos.py Project: mwilliams-ct/ybd
def extract_commit(name, repo, ref, target_dir):
    '''Check out a single commit (or tree) from a Git repo.
    The checkout() function actually clones the entire repo, so this
    function is much quicker when you don't need to copy the whole repo into
    target_dir.
    '''
    gitdir = os.path.join(app.config['gits'], get_repo_name(repo))
    if not os.path.exists(gitdir):
        mirror(name, repo)
    elif not mirror_has_ref(gitdir, ref):
        update_mirror(name, repo, gitdir)

    with tempfile.NamedTemporaryFile() as git_index_file:
        git_env = os.environ.copy()
        git_env['GIT_INDEX_FILE'] = git_index_file.name
        git_env['GIT_WORK_TREE'] = target_dir

        app.log(name, 'Extracting commit', ref)
        if call(['git', 'read-tree', ref], env=git_env, cwd=gitdir):
            app.exit(name, 'ERROR: git read-tree failed for', ref)
        app.log(name, 'Then checkout index', ref)
        if call(['git', 'checkout-index', '--all'], env=git_env, cwd=gitdir):
            app.exit(name, 'ERROR: git checkout-index failed for', ref)
        app.log(name, 'Done', ref)

    utils.set_mtime_recursively(target_dir)
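
Note on the technique above: with GIT_INDEX_FILE pointing at a throwaway index file and GIT_WORK_TREE pointing at the target directory, git read-tree followed by git checkout-index --all materialises a single commit without cloning the repository. Below is a minimal standalone sketch of the same idea, independent of ybd's helpers (the paths in the usage comment are invented):

import os
import subprocess
import tempfile

def extract_ref(mirror_dir, ref, target_dir):
    """Populate target_dir with the tree at `ref` from a bare/mirror repo."""
    if not os.path.isdir(target_dir):
        os.makedirs(target_dir)
    with tempfile.NamedTemporaryFile() as index:
        env = dict(os.environ,
                   GIT_INDEX_FILE=index.name,   # throwaway index
                   GIT_WORK_TREE=target_dir)    # checked-out files land here
        # load the tree for `ref` into the temporary index
        subprocess.check_call(['git', 'read-tree', ref], cwd=mirror_dir, env=env)
        # write every entry of that index out into the work tree
        subprocess.check_call(['git', 'checkout-index', '--all'],
                              cwd=mirror_dir, env=env)

# e.g. extract_ref('/src/gits/linux.git', 'v4.4', '/tmp/linux-v4.4')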
Code example #7
File: repos.py Project: nuxeh/ybd
def get_tree(this):
    ref = this['ref']
    gitdir = os.path.join(app.config['gits'], get_repo_name(this['repo']))
    if this['repo'].startswith('file://') or this['repo'].startswith('/'):
        gitdir = this['repo'].replace('file://', '')
        if not os.path.isdir(gitdir):
            app.exit(this, 'ERROR: git repo not found:', this['repo'])

    if not os.path.exists(gitdir):
        try:
            params = {'repo': get_repo_url(this['repo']), 'ref': ref}
            r = requests.get(url=app.config['tree-server'], params=params)
            return r.json()['tree']
        except:
            if app.config.get('tree-server'):
                app.log(this, 'WARNING: no tree from tree-server for', ref)

        mirror(this['name'], this['repo'])

    with app.chdir(gitdir), open(os.devnull, "w") as fnull:
        if call(['git', 'rev-parse', ref + '^{object}'], stdout=fnull,
                stderr=fnull):
            # can't resolve this ref. is it upstream?
            app.log(this, 'Fetching from upstream to resolve %s' % ref)
            update_mirror(this['name'], this['repo'], gitdir)

        try:
            tree = check_output(['git', 'rev-parse', ref + '^{tree}'],
                                universal_newlines=True)[0:-1]
            return tree

        except:
            # either we don't have a git dir, or ref is not unique
            # or ref does not exist
            app.exit(this, 'ERROR: could not find tree for ref', (ref, gitdir))
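
Both get_tree() variants rely on git's "peeling" syntax: ref + '^{object}' checks whether the ref resolves to any object in the mirror, and ref + '^{tree}' peels a commit or tag down to the tree it points at, which is the value ybd hashes into cache keys. A short standalone illustration, assuming a local mirror path:

import subprocess

def tree_of(gitdir, ref):
    """Return the tree id that `ref` ultimately points to in gitdir."""
    out = subprocess.check_output(
        ['git', 'rev-parse', ref + '^{tree}'],   # peel commit/tag down to its tree
        cwd=gitdir, universal_newlines=True)
    return out.strip()

# e.g. tree_of('/src/gits/linux.git', 'v4.4')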
Code example #8
File: sandbox.py Project: leeming/ybd
def setup(this):
    tempfile.tempdir = app.config['tmp']
    this['sandbox'] = tempfile.mkdtemp()
    os.environ['TMPDIR'] = app.config['tmp']
    app.config['sandboxes'] += [this['sandbox']]
    this['build'] = os.path.join(this['sandbox'], this['name'] + '.build')
    this['install'] = os.path.join(this['sandbox'], this['name'] + '.inst')
    this['baserockdir'] = os.path.join(this['install'], 'baserock')
    this['tmp'] = os.path.join(this['sandbox'], 'tmp')
    for directory in ['build', 'install', 'tmp', 'baserockdir']:
        os.makedirs(this[directory])
    this['log'] = os.path.join(app.config['artifacts'],
                               this['cache'] + '.build-log')
    if app.config.get('instances'):
        this['log'] += '.' + str(app.config.get('fork', 0))
    assembly_dir = this['sandbox']
    for directory in ['dev', 'tmp']:
        call(['mkdir', '-p', os.path.join(assembly_dir, directory)])

    try:
        yield
    except app.RetryException as e:
        raise e
    except:
        import traceback
        app.log(this, 'ERROR: surprise exception in sandbox', '')
        traceback.print_exc()
        app.exit(this, 'ERROR: sandbox debris is at', this['sandbox'])
    finally:
        pass

    app.log(this, "Removing sandbox dir", this['sandbox'], verbose=True)
    app.remove_dir(this['sandbox'])
Code example #9
File: cache.py Project: rdale/ybd
def cache_key(defs, this):
    definition = defs.get(this)
    if definition is None:
        app.exit(this, 'ERROR: No definition found for', this)

    if definition.get('cache') == 'calculating':
        app.exit(this, 'ERROR: recursion loop for', this)

    if definition.get('cache'):
        return definition['cache']

    if definition.get('arch', app.config['arch']) != app.config['arch']:
        return False

    definition['cache'] = 'calculating'

    if definition.get('repo') and not definition.get('tree'):
        definition['tree'] = repos.get_tree(definition)

    hash_factors = {'arch': app.config['arch']}

    for factor in definition.get('build-depends', []):
        hash_factors[factor] = cache_key(defs, factor)

    for factor in definition.get('contents', []):
        hash_factors[factor] = cache_key(defs, factor)

    for factor in ['tree'] + defs.defaults.build_steps:
        if definition.get(factor):
            hash_factors[factor] = definition[factor]

    def hash_system_recursively(system):
        factor = system.get('path', 'BROKEN')
        hash_factors[factor] = cache_key(defs, factor)
        for subsystem in system.get('subsystems', []):
            hash_system_recursively(subsystem)

    if definition.get('kind') == 'cluster':
        for system in definition.get('systems', []):
            hash_system_recursively(system)

    result = json.dumps(hash_factors, sort_keys=True).encode('utf-8')

    safename = definition['name'].replace('/', '-')
    definition['cache'] = safename + "." + hashlib.sha256(result).hexdigest()
    app.config['total'] += 1
    if not get_cache(defs, this) and definition.get('kind') != 'cluster':
        app.config['tasks'] += 1
    app.log(definition, 'Cache_key is', definition['cache'])

    # If you want to catalog the artifacts for a system, do so
    if app.config.get('cache-log'):
        cache_list[definition.get('name')] = definition.get('cache')
        if definition.get('kind') == 'system':
            with open(app.config.get('cache-log'), 'w') as f:
                f.write(json.dumps(cache_list, indent=4))
            app.log('cache-log', 'cache logged to',
                    app.config.get('cache-log'))

    return definition['cache']
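
The cache_key() implementations in this listing all share the same core step: collect every input that can affect the build into a dict of factors, serialise it deterministically, and hash the result. A reduced sketch of just that step, with invented factor names:

import hashlib
import json

def derive_cache_key(name, factors):
    """Derive a stable artifact name from a dict of build inputs.

    json.dumps with sort_keys=True is a canonical serialisation, so
    identical inputs always produce identical keys.
    """
    canonical = json.dumps(factors, sort_keys=True).encode('utf-8')
    return name.replace('/', '-') + '.' + hashlib.sha256(canonical).hexdigest()

# e.g. derive_cache_key('stage2-gcc', {'arch': 'x86_64',
#                                      'tree': 'deadbeef',
#                                      'build-commands': ['make']})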
Code example #10
File: cache.py Project: nowster/ybd
def cache(defs, this, full_root=False):
    if get_cache(defs, this):
        app.log(this, "Bah! I could have cached", cache_key(defs, this))
        return
    tempfile.tempdir = app.config['tmp']
    tmpdir = tempfile.mkdtemp()
    cachefile = os.path.join(tmpdir, cache_key(defs, this))
    if full_root:
        utils.hardlink_all_files(this['install'], this['sandbox'])
        shutil.rmtree(this['install'])
        shutil.rmtree(this['build'])
        utils.set_mtime_recursively(this['sandbox'])
        utils.make_deterministic_tar_archive(cachefile, this['sandbox'])
        os.rename('%s.tar' % cachefile, cachefile)
    else:
        utils.set_mtime_recursively(this['install'])
        utils.make_deterministic_gztar_archive(cachefile, this['install'])
        os.rename('%s.tar.gz' % cachefile, cachefile)

    unpackdir = cachefile + '.unpacked'
    os.makedirs(unpackdir)
    if call(['tar', 'xf', cachefile, '--directory', unpackdir]):
        app.exit(this, 'ERROR: Problem unpacking', cachefile)

    try:
        target = os.path.join(app.config['artifacts'], cache_key(defs, this))
        os.rename(tmpdir, target)
        size = os.path.getsize(get_cache(defs, this))
        app.log(this, 'Now cached %s bytes as' % size, cache_key(defs, this))
    except:
        app.log(this, 'Bah! I raced and rebuilt', cache_key(defs, this))
Code example #11
File: concourse.py Project: leeming/ybd
def write_pipeline(defs, target):
    target = defs.get(target)
    config = {
        'run': {
            'path': 'ybd',
            'args': []
        },
        'platform': 'linux',
        'image': 'docker:///devcurmudgeon/foo'
    }

    aggregate = []
    passed = []
    for it in target.get('contents', []) + target.get('build-depends', []):
        component = defs.get(it)
        if component.get('repo'):
            log('AGGREGATE', 'Adding aggregate for', component['name'])
            aggregate += [{'get': component['name']}]
        else:
            log('PASSED', 'Adding passed for', component['name'])
            aggregate += [{'get': component['name']}]
            passed += [component['name']]

    plan = [{'task': 'Build', 'config': config}, {'aggregate': aggregate}]
    job = {'name': target['name'], 'plan': plan, 'passed': passed}
    pipeline = {'resources': inputs(defs, target), 'jobs': [job]}

    output = './pipeline.yml'
    with open(output, 'w') as f:
        f.write(yaml.dump(pipeline, default_flow_style=False))

    exit('CONCOURSE', 'pipeline is at', output)
Code example #12
File: sandbox.py Project: locallycompact/ybd
def run_logged(this, cmd_list):
    app.log_env(this["log"], os.environ, argv_to_string(cmd_list))
    with open(this["log"], "a") as logfile:
        if call(cmd_list, stdin=PIPE, stdout=logfile, stderr=logfile):
            app.log(this, "ERROR: command failed in directory %s:\n\n" % os.getcwd(), argv_to_string(cmd_list))
            call(["tail", "-n", "200", this["log"]])
            app.exit(this, "ERROR: log file is at", this["log"])
Code example #13
File: repos.py Project: mwilliams-ct/ybd
def mirror(name, repo):
    tempfile.tempdir = app.config['tmp']
    tmpdir = tempfile.mkdtemp()
    repo_url = get_repo_url(repo)
    try:
        tar_file = get_repo_name(repo_url) + '.tar'
        app.log(name, 'Try fetching tarball %s' % tar_file)
        # try tarball first
        with app.chdir(tmpdir), open(os.devnull, "w") as fnull:
            call(['wget', os.path.join(app.config['tar-url'], tar_file)])
            call(['tar', 'xf', tar_file], stderr=fnull)
            os.remove(tar_file)
            call(['git', 'config', 'remote.origin.url', repo_url])
            call(['git', 'config', 'remote.origin.mirror', 'true'])
            if call(['git', 'config', 'remote.origin.fetch',
                     '+refs/*:refs/*']) != 0:
                raise BaseException('Did not get a valid git repo')
            call(['git', 'fetch', 'origin'])
    except:
        app.log(name, 'Try git clone from', repo_url)
        with open(os.devnull, "w") as fnull:
            if call(['git', 'clone', '--mirror', '-n', repo_url, tmpdir]):
                app.exit(name, 'ERROR: failed to clone', repo)

    with app.chdir(tmpdir):
        if call(['git', 'rev-parse']):
            app.exit(name, 'ERROR: problem mirroring git repo at', tmpdir)

    gitdir = os.path.join(app.config['gits'], get_repo_name(repo))
    try:
        os.rename(tmpdir, gitdir)
        app.log(name, 'Git repo is mirrored at', gitdir)
    except:
        pass
Code example #14
File: repos.py Project: mwilliams-ct/ybd
def checkout(name, repo, ref, checkout):
    gitdir = os.path.join(app.config['gits'], get_repo_name(repo))
    if not os.path.exists(gitdir):
        mirror(name, repo)
    elif not mirror_has_ref(gitdir, ref):
        update_mirror(name, repo, gitdir)
    # checkout the required version of this from git
    with open(os.devnull, "w") as fnull:
        # We need to pass '--no-hardlinks' because right now there's nothing to
        # stop the build from overwriting the files in the .git directory
        # inside the sandbox. If they were hardlinks, it'd be possible for a
        # build to corrupt the repo cache. I think it would be faster if we
        # removed --no-hardlinks, though.
        if call(['git', 'clone', '--no-hardlinks', gitdir, checkout],
                stdout=fnull, stderr=fnull):
            app.exit(name, 'ERROR: git clone failed for', ref)

        with app.chdir(checkout):
            if call(['git', 'checkout', '--force', ref], stdout=fnull,
                    stderr=fnull):
                app.exit(name, 'ERROR: git checkout failed for', ref)

            app.log(name, 'Git checkout %s in %s' % (repo, checkout))
            app.log(name, 'Upstream version %s' % get_version(checkout, ref))

            if os.path.exists('.gitmodules'):
                checkout_submodules(name, ref)

    utils.set_mtime_recursively(checkout)
Code example #15
File: repos.py Project: locallycompact/ybd
def extract_commit(name, repo, ref, target_dir):
    '''Check out a single commit (or tree) from a Git repo.
    The checkout() function actually clones the entire repo, so this
    function is much quicker when you don't need to copy the whole repo into
    target_dir.
    '''
    if app.config['concourse-user']:
        gitdir = os.path.join(os.getcwd(), "..", name)
        app.log(name, 'in extract_commit, concourse-user gitdir is'+gitdir, gitdir)
    else:
        gitdir = os.path.join(app.config['gits'], get_repo_name(repo))
    if not os.path.exists(gitdir):
        app.log(name, 'in extract_commit, concourse-user gitdir does not exist '+gitdir, gitdir)
        mirror(name, repo)
    elif not mirror_has_ref(gitdir, ref):
        update_mirror(name, repo, gitdir)

    with tempfile.NamedTemporaryFile() as git_index_file:
        git_env = os.environ.copy()
        git_env['GIT_INDEX_FILE'] = git_index_file.name
        git_env['GIT_WORK_TREE'] = target_dir

        app.log(name, 'Extracting commit', ref)
        if call(['git', 'read-tree', ref], env=git_env, cwd=gitdir):
            app.exit(name, 'ERROR: git read-tree failed for', ref)
        app.log(name, 'Then checkout index', ref)
        if call(['git', 'checkout-index', '--all'], env=git_env, cwd=gitdir):
            app.exit(name, 'ERROR: git checkout-index failed for', ref)
        app.log(name, 'Done', ref)

    utils.set_mtime_recursively(target_dir)
Code example #16
File: sandbox.py Project: ColdrickSotK/ybd
def run_logged(this, cmd_list):
    app.log_env(this['log'], os.environ, argv_to_string(cmd_list))
    with open(this['log'], "a") as logfile:
        if call(cmd_list, stdin=PIPE, stdout=logfile, stderr=logfile):
            app.log(this, 'ERROR: command failed in directory %s:\n\n' %
                    os.getcwd(), argv_to_string(cmd_list))
            app.exit(this, 'ERROR: log file is at', this['log'])
Code example #17
File: repos.py Project: padrigali/ybd
def get_tree(this):
    ref = this['ref']
    gitdir = os.path.join(app.config['gits'], get_repo_name(this['repo']))

    if not os.path.exists(gitdir):
        try:
            url = (app.config['tree-server'] + 'repo=' +
                   get_repo_url(this['repo']) + '&ref=' + ref)
            response = requests.get(url=url)
            tree = response.json()['tree']
            return tree
        except:
            app.log(this, 'WARNING: no tree from tree-server', ref)
            mirror(this['name'], this['repo'])

    with app.chdir(gitdir), open(os.devnull, "w") as fnull:
        if call(['git', 'rev-parse', ref + '^{object}'], stdout=fnull,
                stderr=fnull):
            # can't resolve this ref. is it upstream?
            app.log(this, 'Fetching from upstream to resolve %s' % ref)
            call(['git', 'fetch', 'origin'], stdout=fnull, stderr=fnull)

        try:
            tree = check_output(['git', 'rev-parse', ref + '^{tree}'],
                                universal_newlines=True)[0:-1]
            return tree

        except:
            # either we don't have a git dir, or ref is not unique
            # or ref does not exist
            app.exit(this, 'ERROR: could not find tree for ref', (ref, gitdir))
Code example #18
File: repos.py Project: rdale/ybd
def extract_commit(name, repo, ref, target_dir):
    """Check out a single commit (or tree) from a Git repo.
    The checkout() function actually clones the entire repo, so this
    function is much quicker when you don't need to copy the whole repo into
    target_dir.
    """
    gitdir = os.path.join(app.config["gits"], get_repo_name(repo))
    if not os.path.exists(gitdir):
        mirror(name, repo)
    elif not mirror_has_ref(gitdir, ref):
        update_mirror(name, repo, gitdir)

    with tempfile.NamedTemporaryFile() as git_index_file:
        git_env = os.environ.copy()
        git_env["GIT_INDEX_FILE"] = git_index_file.name
        git_env["GIT_WORK_TREE"] = target_dir

        app.log(name, "Extracting commit", ref)
        if call(["git", "read-tree", ref], env=git_env, cwd=gitdir):
            app.exit(name, "ERROR: git read-tree failed for", ref)
        app.log(name, "Then checkout index", ref)
        if call(["git", "checkout-index", "--all"], env=git_env, cwd=gitdir):
            app.exit(name, "ERROR: git checkout-index failed for", ref)
        app.log(name, "Done", ref)

    utils.set_mtime_recursively(target_dir)
Code example #19
File: repos.py Project: padrigali/ybd
def mirror(name, repo):
    tempfile.tempdir = app.config['tmp']
    tmpdir = tempfile.mkdtemp()
    repo_url = get_repo_url(repo)
    try:
        os.makedirs(tmpdir)
        tar_file = get_repo_name(repo_url) + '.tar'
        app.log(name, 'Try fetching tarball %s' % tar_file)
        # try tarball first
        with app.chdir(tmpdir), open(os.devnull, "w") as fnull:
            call(['wget', os.path.join(app.config['tar-url'], tar_file)])
            call(['tar', 'xf', tar_file])
            os.remove(tar_file)
            call(['git', 'config', 'remote.origin.url', repo_url])
            call(['git', 'config', 'remote.origin.mirror', 'true'])
            if call(['git', 'config', 'remote.origin.fetch',
                     '+refs/*:refs/*']) != 0:
                raise BaseException('Did not get a valid git repo')
            call(['git', 'fetch', 'origin'])
    except:
        app.log(name, 'Try git clone from', repo_url)
        with open(os.devnull, "w") as fnull:
            if call(['git', 'clone', '--mirror', '-n', repo_url, tmpdir]):
                app.exit(name, 'ERROR: failed to clone', repo)

    with app.chdir(tmpdir):
        if call(['git', 'rev-parse']):
            app.exit(name, 'ERROR: problem mirroring git repo at', tmpdir)

    gitdir = os.path.join(app.config['gits'], get_repo_name(repo))
    try:
        os.rename(tmpdir, gitdir)
        app.log(name, 'Git repo is mirrored at', gitdir)
    except:
        pass
Code example #20
File: cache.py Project: gtristan/ybd
def upload(defs, this):
    cachefile = get_cache(defs, this)
    url = app.config['kbas-url'] + 'upload'
    params = {"filename": this['cache'],
              "password": app.config['kbas-password'],
              "checksum": md5(cachefile)}
    with open(cachefile, 'rb') as f:
        try:
            response = requests.post(url=url, data=params, files={"file": f})
            if response.status_code == 201:
                app.log(this, 'Uploaded %s to' % this['cache'], url)
                return
            if response.status_code == 777:
                app.log(this, 'Reproduced %s at' % md5(cachefile),
                        this['cache'])
                app.config['reproduced'].append([md5(cachefile),
                                                 this['cache']])
                return
            if response.status_code == 405:
                # server has different md5 for this artifact
                if this['kind'] == 'stratum':
                    app.exit('BIT-FOR-BIT',
                             'ERROR: stratum reproduction failed for',
                             this['cache'])
                app.log(this, 'Artifact server already has', this['cache'])
                return
            app.log(this, 'Artifact server problem:', response.status_code)
        except:
            pass
        app.log(this, 'Failed to upload', this['cache'])
Code example #21
File: repos.py Project: gtristan/ybd
def update_mirror(name, repo, gitdir):
    with app.chdir(gitdir), open(os.devnull, "w") as fnull:
        app.log(name, 'Refreshing mirror for %s' % repo)
        repo_url = get_repo_url(repo)
        if call(['git', 'fetch', repo_url, '+refs/*:refs/*', '--prune'],
                stdout=fnull, stderr=fnull):
            app.exit(name, 'ERROR: git update mirror failed', repo)
Code example #22
File: concourse.py Project: mwilliams-ct/ybd
def write_pipeline(defs, target):
    target = defs.get(target)
    build = {}
    build['path'] = 'ybd'
    build['args'] = []
    config = {}
    config['run'] = build
    config['platform'] = 'linux'
    config['image'] = "docker:///devcurmudgeon/foo"

    pipeline = {}
    pipeline['resources'] = inputs(defs, target)

    aggregate = []
    for it in target.get('contents', []) + target.get('build-depends', []):
        component = defs.get(it)
        aggregate += [dict(get=component['name'])]

    plan = [dict(task='build', config=config), dict(aggregate=aggregate)]
    job = dict(name=os.path.basename(app.config['target']), plan=plan)
    pipeline['jobs'] = [job]

    output = './pipeline.yml'
    with open(output, 'w') as f:
        f.write(yaml.dump(pipeline,
                default_flow_style=False))

    app.exit('CONCOURSE', 'pipeline is at', output)
Code example #23
File: sandbox.py Project: nuxeh/ybd
def setup(this):
    currentdir = os.getcwd()
    tempfile.tempdir = app.config['tmp']
    this['sandbox'] = tempfile.mkdtemp()
    os.environ['TMPDIR'] = app.config['tmp']
    app.config['sandboxes'] += [this['sandbox']]
    this['build'] = os.path.join(this['sandbox'], this['name'] + '.build')
    this['install'] = os.path.join(this['sandbox'], this['name'] + '.inst')
    this['baserockdir'] = os.path.join(this['install'], 'baserock')
    this['tmp'] = os.path.join(this['sandbox'], 'tmp')
    for directory in ['build', 'install', 'tmp', 'baserockdir']:
        os.makedirs(this[directory])
    this['log'] = os.path.join(app.config['artifacts'],
                               this['cache'] + '.build-log')
    if app.config.get('instances'):
        this['log'] += '.' + str(app.config.get('fork', 0))
    assembly_dir = this['sandbox']
    for directory in ['dev', 'tmp']:
        call(['mkdir', '-p', os.path.join(assembly_dir, directory)])

    try:
        yield
    except app.RetryException as e:
        raise e
    except:
        import traceback
        app.log(this, 'ERROR: a surprise exception happened', '')
        traceback.print_exc()
        app.exit(this, 'ERROR: sandbox debris is at', this['sandbox'])
    finally:
        pass

    if app.config.get('log-verbose'):
        app.log(this, "Removing sandbox dir", this['sandbox'])
    app.remove_dir(this['sandbox'])
Code example #24
File: repos.py Project: rdale/ybd
def mirror(name, repo):
    tempfile.tempdir = app.config["tmp"]
    tmpdir = tempfile.mkdtemp()
    repo_url = get_repo_url(repo)
    try:
        tar_file = get_repo_name(repo_url) + ".tar"
        app.log(name, "Try fetching tarball %s" % tar_file)
        # try tarball first
        with app.chdir(tmpdir), open(os.devnull, "w") as fnull:
            call(["wget", os.path.join(app.config["tar-url"], tar_file)])
            call(["tar", "xf", tar_file], stderr=fnull)
            os.remove(tar_file)
            call(["git", "config", "remote.origin.url", repo_url])
            call(["git", "config", "remote.origin.mirror", "true"])
            if call(["git", "config", "remote.origin.fetch", "+refs/*:refs/*"]) != 0:
                raise BaseException("Did not get a valid git repo")
            call(["git", "fetch", "origin"])
    except:
        app.log(name, "Try git clone from", repo_url)
        with open(os.devnull, "w") as fnull:
            if call(["git", "clone", "--mirror", "-n", repo_url, tmpdir]):
                app.exit(name, "ERROR: failed to clone", repo)

    with app.chdir(tmpdir):
        if call(["git", "rev-parse"]):
            app.exit(name, "ERROR: problem mirroring git repo at", tmpdir)

    gitdir = os.path.join(app.config["gits"], get_repo_name(repo))
    try:
        os.rename(tmpdir, gitdir)
        app.log(name, "Git repo is mirrored at", gitdir)
    except:
        pass
Code example #25
def update_mirror(name, repo, gitdir):
    with app.chdir(gitdir), open(os.devnull, "w") as fnull:
        app.log(name, 'Refreshing mirror for %s' % repo)
        repo_url = get_repo_url(repo)
        if call(['git', 'fetch', repo_url, '+refs/*:refs/*', '--prune'],
                stdout=fnull,
                stderr=fnull):
            app.exit(name, 'ERROR: git update mirror failed', repo)
Code example #26
File: sandbox.py Project: mwilliams-ct/ybd
def run_logged(this, cmd_list):
    app.log_env(this['log'], os.environ, argv_to_string(cmd_list))
    with open(this['log'], "a") as logfile:
        if call(cmd_list, stdin=PIPE, stdout=logfile, stderr=logfile):
            app.log(this,
                    'ERROR: command failed in directory %s:\n\n' % os.getcwd(),
                    argv_to_string(cmd_list))
            call(['tail', '-n', '200', this['log']])
            app.exit(this, 'ERROR: log file is at', this['log'])
Code example #27
File: definitions.py Project: jamespthomas/ybd
    def _fix_path_name(self, this, name='ERROR'):
        if this.get('path', None) is None:
            this['path'] = this.pop('morph', this.get('name', name))
            if this['path'] == 'ERROR':
                app.exit(this, 'ERROR: no path, no name?')
        if this.get('name') is None:
            this['name'] = this['path'].replace('/', '-')
        if this['name'] == app.settings['target']:
            app.settings['target'] = this['path']
Code example #28
File: cache.py Project: ColdrickSotK/ybd
def unpack(defs, this):
    cachefile = get_cache(defs, this)
    if cachefile:
        unpackdir = cachefile + '.unpacked'
        if not os.path.exists(unpackdir):
            os.makedirs(unpackdir)
            if call(['tar', 'xf', cachefile, '--directory', unpackdir]):
                app.exit(this, 'ERROR: Problem unpacking', cachefile)
        return unpackdir

    app.exit(this, 'ERROR: Cached artifact not found')
Code example #29
File: wrangler.py Project: gtristan/ybd
def wrangle(source, output):
    format = detect_format(source)
    if format is not None and output != '/' and os.path.isdir(output):
        shutil.rmtree(output)
    if format == 'baserock-morphologies':
        app.log('WRANGLER', 'baserock morphs found in', source)
        wrangle_morphs(source, output)
    elif format == 'cida-definitions':
        app.log('WRANGLER', 'cida files found in', source)
        wrangle_cida(source, output)
    else:
        app.exit('WRANGLER', 'ERROR: no definitions|recipes found in', source)
Code example #30
File: definitions.py Project: gtristan/ybd
    def validate_schema(self, schemas, data):
        if schemas == {} or \
                app.config.get('schema-validation', False) is False:
            return
        try:
            jsonschema.validate(data, schemas[data.get('kind', None)])
        except jsonschema.exceptions.ValidationError as e:
            if app.config.get('schema-validation') == 'strict':
                app.exit(data, 'ERROR: schema validation failed:\n', e)

            app.log(data, 'WARNING: schema validation failed:')
            print e
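
validate_schema() is a thin wrapper around jsonschema.validate(), selecting a schema by the definition's 'kind' and aborting only when schema-validation is set to 'strict'. A self-contained sketch of the underlying call, using a made-up schema:

import jsonschema

# invented schema for a 'chunk' definition, just to show the call shape
chunk_schema = {
    'type': 'object',
    'required': ['name', 'repo', 'ref'],
    'properties': {'name': {'type': 'string'},
                   'repo': {'type': 'string'},
                   'ref': {'type': 'string'}},
}

definition = {'name': 'busybox', 'repo': 'upstream:busybox', 'ref': '1_24_stable'}
try:
    jsonschema.validate(definition, chunk_schema)   # raises on failure
except jsonschema.exceptions.ValidationError as e:
    print('WARNING: schema validation failed:\n%s' % e)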
Code example #31
File: wrangler.py Project: mwilliams-ct/ybd
def wrangle(source, output):
    format = detect_format(source)
    if format is not None and output != '/' and os.path.isdir(output):
        shutil.rmtree(output)
    if format == 'baserock-morphologies':
        app.log('WRANGLER', 'baserock morphs found in', source)
        wrangle_morphs(source, output)
    elif format == 'cida-definitions':
        app.log('WRANGLER', 'cida files found in', source)
        wrangle_cida(source, output)
    else:
        app.exit('WRANGLER', 'ERROR: no definitions|recipes found in', source)
Code example #32
    def validate_schema(self, schemas, data):
        if schemas == {} or \
                config.get('schema-validation', False) is False:
            return
        try:
            jsonschema.validate(data, schemas[data.get('kind', None)])
        except jsonschema.exceptions.ValidationError as e:
            if config.get('schema-validation') == 'strict':
                exit(data, 'ERROR: schema validation failed:\n', e)

            log(data, 'WARNING: schema validation failed:')
            print e
Code example #33
File: sandbox.py Project: leeming/ybd
def install(defs, this, component):
    # populate this['sandbox'] with the artifact files from component
    if os.path.exists(os.path.join(this['sandbox'], 'baserock',
                                   component['name'] + '.meta')):
        return
    app.log(this, 'Sandbox: installing %s' % component['cache'], verbose=True)
    if cache.get_cache(defs, component) is False:
        app.exit(this, 'ERROR: unable to get cache for', component['name'])
    unpackdir = cache.get_cache(defs, component) + '.unpacked'
    if this.get('kind') == 'system':
        utils.copy_all_files(unpackdir, this['sandbox'])
    else:
        utils.hardlink_all_files(unpackdir, this['sandbox'])
Code example #34
File: sandbox.py Project: locallycompact/ybd
def install(defs, this, component):
    # populate this['sandbox'] with the artifact files from component
    if os.path.exists(os.path.join(this["sandbox"], "baserock", component["name"] + ".meta")):
        return
    if app.config.get("log-verbose"):
        app.log(this, "Installing %s" % component["cache"])
    if cache.get_cache(defs, component) is False:
        app.exit(this, "ERROR: unable to get cache for", component["name"])
    unpackdir = cache.get_cache(defs, component) + ".unpacked"
    if this.get("kind") is "system":
        utils.copy_all_files(unpackdir, this["sandbox"])
    else:
        utils.hardlink_all_files(unpackdir, this["sandbox"])
Code example #35
    def _load(self, path):
        '''Load a single definition file as a dict.

        The file is assumed to be yaml, and we insert the provided path into
        the dict keyed as 'path'.

        '''
        try:
            with open(path) as f:
                text = f.read()
            contents = yaml.safe_load(text)
        except yaml.YAMLError, exc:
            exit('DEFINITIONS', 'ERROR: could not parse %s' % path, exc)
Code example #36
File: sandbox.py Project: leeming/ybd
def install(defs, this, component):
    # populate this['sandbox'] with the artifact files from component
    if os.path.exists(
            os.path.join(this['sandbox'], 'baserock',
                         component['name'] + '.meta')):
        return
    app.log(this, 'Sandbox: installing %s' % component['cache'], verbose=True)
    if cache.get_cache(defs, component) is False:
        app.exit(this, 'ERROR: unable to get cache for', component['name'])
    unpackdir = cache.get_cache(defs, component) + '.unpacked'
    if this.get('kind') == 'system':
        utils.copy_all_files(unpackdir, this['sandbox'])
    else:
        utils.hardlink_all_files(unpackdir, this['sandbox'])
Code example #37
File: cache.py Project: padrigali/ybd
def unpack(defs, this, tmpfile):
    unpackdir = tmpfile + ".unpacked"
    os.makedirs(unpackdir)
    if call(["tar", "xf", tmpfile, "--directory", unpackdir]):
        app.exit(this, "ERROR: Problem unpacking", tmpfile)

    try:
        path = os.path.join(app.config["artifacts"], cache_key(defs, this))
        os.rename(os.path.dirname(tmpfile), path)
        size = os.path.getsize(get_cache(defs, this))
        app.log(this, "Now cached %s bytes as" % size, cache_key(defs, this))
        return path
    except:
        app.log(this, "Bah! I raced on", cache_key(defs, this))
        shutil.rmtree(os.path.dirname(tmpfile))
        return False
Code example #38
File: cache.py Project: nowster/ybd
def cache_key(defs, this):
    definition = defs.get(this)
    if definition is None:
        app.exit(this, 'ERROR: No definition found for', this)

    if definition.get('cache') == 'calculating':
        app.exit(this, 'ERROR: recursion loop for', this)

    if definition.get('cache'):
        return definition['cache']

    definition['cache'] = 'calculating'

    if definition.get('repo') and not definition.get('tree'):
        definition['tree'] = repos.get_tree(definition)

    hash_factors = {'arch': app.config['arch']}

    for factor in definition.get('build-depends', []):
        hash_factors[factor] = cache_key(defs, factor)

    for factor in definition.get('contents', []):
        hash_factors[factor] = cache_key(defs, factor)

    for factor in ['tree'] + defs.defaults.build_steps:
        if definition.get(factor):
            hash_factors[factor] = definition[factor]

    def hash_system_recursively(system):
        factor = system.get('path', 'BROKEN')
        hash_factors[factor] = cache_key(defs, factor)
        for subsystem in system.get('subsystems', []):
            hash_system_recursively(subsystem)

    if definition.get('kind') == 'cluster':
        for system in definition.get('systems', []):
            hash_system_recursively(system)

    result = json.dumps(hash_factors, sort_keys=True).encode('utf-8')

    safename = definition['name'].replace('/', '-')
    definition['cache'] = safename + "." + hashlib.sha256(result).hexdigest()
    app.config['total'] += 1
    if not get_cache(defs, this):
        app.config['tasks'] += 1
    app.log(definition, 'Cache_key is', definition['cache'])
    return definition['cache']
Code example #39
File: cache.py Project: padrigali/ybd
def cache_key(defs, this):
    definition = defs.get(this)
    if definition is None:
        app.exit(this, "ERROR: No definition found for", this)

    if definition.get("cache") == "calculating":
        app.exit(this, "ERROR: recursion loop for", this)

    if definition.get("cache"):
        return definition["cache"]

    definition["cache"] = "calculating"

    if definition.get("repo") and not definition.get("tree"):
        definition["tree"] = repos.get_tree(definition)

    hash_factors = {"arch": app.config["arch"]}

    for factor in definition.get("build-depends", []):
        hash_factors[factor] = cache_key(defs, factor)

    for factor in definition.get("contents", []):
        hash_factors[factor] = cache_key(defs, factor)

    for factor in ["tree"] + defs.defaults.build_steps:
        if definition.get(factor):
            hash_factors[factor] = definition[factor]

    def hash_system_recursively(system):
        factor = system.get("path", "BROKEN")
        hash_factors[factor] = cache_key(defs, factor)
        for subsystem in system.get("subsystems", []):
            hash_system_recursively(subsystem)

    if definition.get("kind") == "cluster":
        for system in definition.get("systems", []):
            hash_system_recursively(system)

    result = json.dumps(hash_factors, sort_keys=True).encode("utf-8")

    safename = definition["name"].replace("/", "-")
    definition["cache"] = safename + "." + hashlib.sha256(result).hexdigest()
    app.config["total"] += 1
    if not get_cache(defs, this):
        app.config["tasks"] += 1
    app.log(definition, "Cache_key is", definition["cache"])
    return definition["cache"]
Code example #40
File: assembly.py Project: leeming/ybd
def claim(defs, this):
    with open(lockfile(defs, this), 'a') as l:
        try:
            fcntl.flock(l, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except Exception as e:
            if e.errno in (errno.EACCES, errno.EAGAIN):
                # flock() will report EACCES or EAGAIN when the lock fails.
                raise RetryException(defs, this)
            else:
                log(this, 'ERROR: surprise exception in assembly', '')
                import traceback
                traceback.print_exc()
                exit(this, 'ERROR: sandbox debris is at', this['sandbox'])
        try:
            yield
        finally:
            if os.path.isfile(lockfile(defs, this)):
                os.remove(lockfile(defs, this))
Code example #41
File: assembly.py Project: leeming/ybd
def claim(defs, this):
    with open(lockfile(defs, this), 'a') as l:
        try:
            fcntl.flock(l, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except Exception as e:
            if e.errno in (errno.EACCES, errno.EAGAIN):
                # flock() will report EACCES or EAGAIN when the lock fails.
                raise RetryException(defs, this)
            else:
                log(this, 'ERROR: surprise exception in assembly', '')
                import traceback
                traceback.print_exc()
                exit(this, 'ERROR: sandbox debris is at', this['sandbox'])
        try:
            yield
        finally:
            if os.path.isfile(lockfile(defs, this)):
                os.remove(lockfile(defs, this))
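
claim() is written as a generator, presumably wrapped as a context manager elsewhere in assembly.py (the decorator is not shown in these excerpts): it takes a non-blocking exclusive flock on a per-artifact lockfile so that parallel instances never build the same artifact, and a lost race surfaces as a RetryException. A self-contained sketch of the same locking pattern, with an illustrative exception type and lock path:

import contextlib
import errno
import fcntl
import os

class Busy(Exception):
    """Raised when another process already holds the lock."""

@contextlib.contextmanager
def claim(lockpath):
    with open(lockpath, 'a') as lock:
        try:
            # non-blocking: fail immediately if another process holds the lock
            fcntl.flock(lock, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except IOError as e:
            if e.errno in (errno.EACCES, errno.EAGAIN):
                raise Busy(lockpath)
            raise
        try:
            yield
        finally:
            if os.path.isfile(lockpath):
                os.remove(lockpath)

# with claim('/tmp/artifacts/foo.lock'):
#     ...  # build foo; a second process gets Busy and can retry later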
Code example #42
File: cache.py Project: gtristan/ybd
def cache_key(defs, this):
    definition = defs.get(this)
    if definition is None:
        app.exit(this, 'ERROR: No definition found for', this)

    if definition.get('cache') == 'calculating':
        app.exit(this, 'ERROR: recursion loop for', this)

    if definition.get('cache'):
        return definition['cache']

    if definition.get('arch', app.config['arch']) != app.config['arch']:
        return False

    definition['cache'] = 'calculating'

    if definition.get('repo') and not definition.get('tree'):
        definition['tree'] = repos.get_tree(definition)

    factors = hash_factors(defs, definition)
    factors = json.dumps(factors, sort_keys=True).encode('utf-8')
    key = hashlib.sha256(factors).hexdigest()
    if app.config.get('mode', 'normal') == 'no-build':
        key = 'no-build'

    definition['cache'] = definition['name'] + "." + key

    app.config['total'] += 1
    if not get_cache(defs, this):
        app.config['tasks'] += 1

    app.log(definition, 'Cache_key is', definition['cache'])

    # If you want to catalog the artifacts for a system, do so
    if app.config.get('cache-log'):
        cache_list[definition.get('name')] = definition.get('cache')
        if definition.get('kind') == 'system':
            with open(app.config.get('cache-log'), 'w') as f:
                f.write(json.dumps(cache_list, indent=4))
            app.log('cache-log', 'cache logged to',
                    app.config.get('cache-log'))

    app.config['keys'] += [definition['cache']]
    return definition['cache']
Code example #43
File: repos.py Project: gtristan/ybd
def checkout_submodules(this):
    app.log(this, 'Checking git submodules')
    with open('.gitmodules', "r") as gitfile:
        # drop indentation in sections, as RawConfigParser cannot handle it
        content = '\n'.join([l.strip() for l in gitfile.read().splitlines()])
    io = StringIO(content)
    parser = RawConfigParser()
    parser.readfp(io)

    for section in parser.sections():
        # validate section name against the 'submodule "foo"' pattern
        submodule = re.sub(r'submodule "(.*)"', r'\1', section)
        path = parser.get(section, 'path')
        try:
            url = this['submodules'][path]['url']
            app.log(this, 'Processing submodule %s from' % path, url)
        except:
            url = parser.get(section, 'url')
            app.log(this, 'WARNING: fallback to submodule %s from' % path, url)

        try:
            # list objects in the parent repo tree to find the commit
            # object that corresponds to the submodule
            commit = check_output(['git', 'ls-tree', this['ref'], path])

            # read the commit hash from the output
            fields = commit.split()
            if len(fields) >= 2 and fields[1] == 'commit':
                submodule_commit = commit.split()[2]

                # fail if the commit hash is invalid
                if len(submodule_commit) != 40:
                    raise Exception

                fulldir = os.path.join(os.getcwd(), path)
                _checkout(this['name'], url, submodule_commit, fulldir)

            else:
                app.log(this, 'Skipping submodule %s, not a commit:' % path,
                        fields)

        except:
            app.exit(this, "ERROR: git submodules problem", "")
Code example #44
def checkout_submodules(this):
    app.log(this, 'Checking git submodules')
    with open('.gitmodules', "r") as gitfile:
        # drop indentation in sections, as RawConfigParser cannot handle it
        content = '\n'.join([l.strip() for l in gitfile.read().splitlines()])
    io = StringIO(content)
    parser = RawConfigParser()
    parser.readfp(io)

    for section in parser.sections():
        # validate section name against the 'submodule "foo"' pattern
        submodule = re.sub(r'submodule "(.*)"', r'\1', section)
        path = parser.get(section, 'path')
        try:
            url = this['submodules'][path]['url']
            app.log(this, 'Processing submodule %s from' % path, url)
        except:
            url = parser.get(section, 'url')
            app.log(this, 'WARNING: fallback to submodule %s from' % path, url)

        try:
            # list objects in the parent repo tree to find the commit
            # object that corresponds to the submodule
            commit = check_output(['git', 'ls-tree', this['ref'], path])

            # read the commit hash from the output
            fields = commit.split()
            if len(fields) >= 2 and fields[1] == 'commit':
                submodule_commit = commit.split()[2]

                # fail if the commit hash is invalid
                if len(submodule_commit) != 40:
                    raise Exception

                fulldir = os.path.join(os.getcwd(), path)
                _checkout(this['name'], url, submodule_commit, fulldir)

            else:
                app.log(this, 'Skipping submodule %s, not a commit:' % path,
                        fields)

        except:
            app.exit(this, "ERROR: git submodules problem", "")
Code example #45
File: splitting.py Project: leeming/ybd
def check_overlaps(defs, component):
    if set(config['new-overlaps']) <= set(config['overlaps']):
        config['new-overlaps'] = []
        return

    overlaps_found = False
    config['new-overlaps'] = list(set(config['new-overlaps']))
    for path in config['new-overlaps']:
        log(component, 'WARNING: overlapping path', path)
        for filename in os.listdir(component['baserockdir']):
            with open(os.path.join(component['baserockdir'], filename)) as f:
                for line in f:
                    if path[1:] in line:
                        log(filename, 'WARNING: overlap at', path[1:])
                        overlaps_found = True
                        break
        if config.get('check-overlaps') == 'exit':
            exit(component, 'ERROR: overlaps found', config['new-overlaps'])
    config['overlaps'] = list(set(config['new-overlaps'] + config['overlaps']))
    config['new-overlaps'] = []
Code example #46
File: cache.py Project: leeming/ybd
def cache_key(defs, this):
    definition = defs.get(this)
    if definition is None:
        app.exit(this, 'ERROR: No definition found for', this)

    if definition.get('cache') == 'calculating':
        app.exit(this, 'ERROR: recursion loop for', this)

    if definition.get('cache'):
        return definition['cache']

    if definition.get('arch', app.config['arch']) != app.config['arch']:
        return False

    definition['cache'] = 'calculating'

    key = 'no-build'
    if app.config.get('mode', 'normal') in ['keys-only', 'normal']:
        if definition.get('repo') and not definition.get('tree'):
            definition['tree'] = get_tree(definition)
        factors = hash_factors(defs, definition)
        factors = json.dumps(factors, sort_keys=True).encode('utf-8')
        key = hashlib.sha256(factors).hexdigest()

    definition['cache'] = definition['name'] + "." + key

    app.config['total'] += 1
    x = 'x'
    if not get_cache(defs, this):
        x = ' '
        app.config['tasks'] += 1

    app.log('CACHE-KEYS', '[%s]' % x, definition['cache'])
    if app.config.get('manifest', False):
        update_manifest(defs, this, app.config['manifest'])

    if 'keys' in app.config:
        app.config['keys'] += [definition['cache']]
    return definition['cache']
Code example #47
File: cache.py Project: leeming/ybd
def cull(artifact_dir):
    tempfile.tempdir = app.config['tmp']
    deleted = 0

    def clear(deleted, artifact_dir):
        artifacts = utils.sorted_ls(artifact_dir)
        for artifact in artifacts:
            stat = os.statvfs(artifact_dir)
            free = stat.f_frsize * stat.f_bavail / 1000000000
            if free >= app.config.get('min-gigabytes', 10):
                app.log('SETUP', '%sGB is enough free space' % free)
                if deleted > 0:
                    app.log('SETUP', 'Culled %s items in' % deleted,
                            artifact_dir)
                return True
            path = os.path.join(artifact_dir, artifact)
            if os.path.exists(os.path.join(path, artifact + '.unpacked')):
                path = os.path.join(path, artifact + '.unpacked')
            if os.path.exists(path) and artifact not in app.config['keys']:
                tmpdir = tempfile.mkdtemp()
                shutil.move(path, os.path.join(tmpdir, 'to-delete'))
                app.remove_dir(tmpdir)
                deleted += 1
        return False

    # cull unpacked dirs first
    if clear(deleted, artifact_dir):
        return

    # cull artifacts
    if clear(deleted, artifact_dir):
        return

    stat = os.statvfs(artifact_dir)
    free = stat.f_frsize * stat.f_bavail / 1000000000
    if free < app.config.get('min-gigabytes', 10):
        app.exit('SETUP', 'ERROR: %sGB is less than min-gigabytes:' % free,
                 app.config.get('min-gigabytes', 10))
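
cull() measures free disk space with os.statvfs: fragment size times the number of blocks available to unprivileged users, divided by 10^9 to get decimal gigabytes. The same calculation in isolation, with an illustrative path:

import os

def free_gigabytes(path):
    """Free space on path's filesystem available to non-root users, in GB."""
    st = os.statvfs(path)
    return st.f_frsize * st.f_bavail / 1000000000.0

# e.g. cull when free_gigabytes('/src/artifacts') drops below min-gigabytes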
Code example #48
File: assembly.py Project: leeming/ybd
def get_build_commands(defs, this):
    '''Get commands specified in 'this', plus commands implied by build-system

    The containing definition may point to another definition file (using
    the 'path' field in YBD's internal data model) that contains build
    instructions, or it may only specify a predefined build system, using
    'build-system' field.

    The definition containing build instructions can specify a predefined
    build-system and then override some or all of the command sequences it
    defines.

    If the definition file doesn't exist and no build-system is specified,
    this function will scan the contents of the checked-out source repo and try
    to autodetect what build system is used.

    '''

    if this.get('kind', None) == "system":
        # Systems must run their integration scripts as install commands
        this['install-commands'] = gather_integration_commands(defs, this)
        return

    if this.get('build-system') or os.path.exists(this['path']):
        bs = this.get('build-system', 'manual')
        log(this, 'Defined build system is', bs)
    else:
        files = os.listdir(this['build'])
        bs = defs.defaults.detect_build_system(files)
        if bs == 'NOT FOUND':
            exit(this, 'ERROR: no build-system detected,',
                 'and missing %s' % this['path'])
        log(this, 'WARNING: Autodetected build system', bs)

    for build_step in defs.defaults.build_steps:
        if this.get(build_step, None) is None:
            commands = defs.defaults.build_systems[bs].get(build_step, [])
            this[build_step] = commands
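
The final loop in get_build_commands() is the override mechanism described in the docstring: for each known build step, commands given in the definition win, otherwise the predefined build-system's defaults are used. A reduced sketch of that merge, using an invented defaults table rather than ybd's defs.defaults:

# invented, minimal stand-in for the build-system defaults
build_steps = ['configure-commands', 'build-commands', 'install-commands']
build_systems = {
    'autotools': {'configure-commands': ['./configure --prefix=/usr'],
                  'build-commands': ['make'],
                  'install-commands': ['make DESTDIR="$DESTDIR" install']},
    'manual': {},
}

def resolve_build_commands(definition):
    bs = definition.get('build-system', 'manual')
    for step in build_steps:
        if definition.get(step) is None:
            # not overridden in the definition: fall back to the build-system
            definition[step] = build_systems[bs].get(step, [])
    return definition

# a definition that overrides only its configure step:
chunk = {'name': 'foo', 'build-system': 'autotools',
         'configure-commands': ['./configure --prefix=/usr --disable-nls']}
resolve_build_commands(chunk)
# chunk['build-commands'] == ['make'], taken from the autotools defaults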
Code example #49
File: cache.py Project: leeming/ybd
def unpack(defs, this, tmpfile):
    unpackdir = tmpfile + '.unpacked'
    os.makedirs(unpackdir)
    if call(['tar', 'xf', tmpfile, '--directory', unpackdir]):
        app.log(this, 'Problem unpacking', tmpfile)
        shutil.rmtree(os.path.dirname(tmpfile))
        return False

    try:
        path = os.path.join(app.config['artifacts'], cache_key(defs, this))
        shutil.move(os.path.dirname(tmpfile), path)
        if not os.path.isdir(path):
            app.exit(this, 'ERROR: problem creating cache artifact', path)

        size = os.path.getsize(get_cache(defs, this))
        checksum = md5(get_cache(defs, this))
        app.log(this, 'Cached %s bytes %s as' % (size, checksum),
                cache_key(defs, this))
        return path
    except:
        app.log(this, 'Bah! I raced on', cache_key(defs, this))
        shutil.rmtree(os.path.dirname(tmpfile))
        return False
Code example #50
    def _fix_keys(self, item):
        '''Normalizes keys for a definition dict and its contents

        Some definitions have a 'morph' field which is a relative path. Others
        only have a 'name' field, which has no directory part. A few do not
        have a 'name'

        This sets our key to be 'path', and fixes any missed 'name' to be
        the same as 'path' but replacing '/' by '-'

        '''
        if item.get('morph') and not os.path.isfile(item['morph']):
            log('DEFINITIONS', 'WARNING: missing definition', item['morph'])
        item.setdefault('path', item.pop('morph', item.get('name', None)))
        if item['path'] is None:
            exit(item, 'ERROR: no path, no name?')
        item.setdefault('name', item['path'])
        item['name'] = item['name'].replace('/', '-')
        if item['name'] == config['target']:
            config['target'] = item['path']

        for system in (item.get('systems', []) + item.get('subsystems', [])):
            self._fix_keys(system)
Code example #51
File: repos.py Project: mwilliams-ct/ybd
def get_tree(this):
    ref = this['ref']
    gitdir = os.path.join(app.config['gits'], get_repo_name(this['repo']))
    if this['repo'].startswith('file://') or this['repo'].startswith('/'):
        gitdir = this['repo'].replace('file://', '')
        if not os.path.isdir(gitdir):
            app.exit(this, 'ERROR: git repo not found:', this['repo'])

    if not os.path.exists(gitdir):
        try:
            url = (app.config['tree-server'] + 'repo=' +
                   get_repo_url(this['repo']) + '&ref=' + ref)
            response = requests.get(url=url)
            tree = response.json()['tree']
            return tree
        except:
            if app.config.get('tree-server'):
                app.log(this, 'WARNING: no tree from tree-server for', ref)

        mirror(this['name'], this['repo'])

    with app.chdir(gitdir), open(os.devnull, "w") as fnull:
        if call(['git', 'rev-parse', ref + '^{object}'], stdout=fnull,
                stderr=fnull):
            # can't resolve this ref. is it upstream?
            app.log(this, 'Fetching from upstream to resolve %s' % ref)
            call(['git', 'fetch', 'origin'], stdout=fnull, stderr=fnull)

        try:
            tree = check_output(['git', 'rev-parse', ref + '^{tree}'],
                                universal_newlines=True)[0:-1]
            return tree

        except:
            # either we don't have a git dir, or ref is not unique
            # or ref does not exist
            app.exit(this, 'ERROR: could not find tree for ref', (ref, gitdir))
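
The tree-server lookup near the top of this function is a plain HTTP GET returning JSON; a hedged sketch of the exchange, where the server URL and any response fields beyond 'tree' are assumptions consistent with the code:

import requests

def lookup_tree(server, repo_url, ref):
    # e.g. GET <server>repo=git://git.example.org/foo&ref=master
    # expected response body: {"tree": "<sha1 of the git tree object>", ...}
    response = requests.get(url=server + 'repo=' + repo_url + '&ref=' + ref)
    return response.json()['tree']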
Code example #52
    def _fix_keys(self, definition, name='ERROR'):
        '''Normalizes keys for a definition dict and its contents

        Some definitions have a 'morph' field which is a relative path. Others
        only have a 'name' field, which has no directory part. A few do not
        have a 'name'

        This sets our key to be 'path', and fixes any missed 'name' to be
        the same as 'path' but replacing '/' by '-'

        '''
        if definition.get('path', None) is None:
            definition['path'] = definition.pop('morph',
                                                definition.get('name', name))
            if definition['path'] == 'ERROR':
                app.exit(definition, 'ERROR: no path, no name?')
        if definition.get('name') is None:
            definition['name'] = definition['path'].replace('/', '-')
        if definition['name'] == app.config['target']:
            app.config['target'] = definition['path']

        for system in (definition.get('systems', []) +
                       definition.get('subsystems', [])):
            self._fix_keys(system)
Code example #53
File: __main__.py  Project: leeming/ybd
with app.timer('TOTAL'):
    tmp_lock = open(os.path.join(app.config['tmp'], 'lock'), 'r')
    fcntl.flock(tmp_lock, fcntl.LOCK_SH | fcntl.LOCK_NB)

    target = os.path.join(app.config['defdir'], app.config['target'])
    app.log('TARGET', 'Target is %s' % target, app.config['arch'])
    with app.timer('DEFINITIONS', 'parsing %s' % app.config['def-version']):
        defs = Definitions()
    with app.timer('CACHE-KEYS', 'cache-key calculations'):
        cache.cache_key(defs, app.config['target'])

    cache.cull(app.config['artifacts'])
    target = defs.get(app.config['target'])
    if app.config['total'] == 0 or (app.config['total'] == 1
                                    and target.get('kind') == 'cluster'):
        app.exit('ARCH', 'ERROR: no definitions found for', app.config['arch'])

    defs.save_trees()
    if app.config.get('mode', 'normal') == 'keys-only':
        with open(app.config['result-file'], 'w') as f:
            f.write(target['cache'] + '\n')
        app.log('RESULT', 'Cache-key for target is at',
                app.config['result-file'])
        os._exit(0)

    sandbox.executor = sandboxlib.executor_for_platform()
    app.log(app.config['target'], 'Sandbox using %s' % sandbox.executor)
    if sandboxlib.chroot == sandbox.executor:
        app.log(
            app.config['target'], 'WARNING: using chroot is less safe ' +
            'than using linux-user-chroot')
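
The flock taken at the start of this block is what lets several ybd instances share one tmp directory: each builder holds a shared, non-blocking lock, so an exclusive lock (for example, from a cleanup job) can only be taken once no builds are running. A minimal sketch of both sides, with the lock path hard-coded for illustration:

import fcntl
import os

lockfile = os.path.join('/src/tmp', 'lock')   # assumed tmp dir

# Builder side, as in __main__ above: shared and non-blocking, so many
# builders can hold it at once; if it cannot be taken, flock raises at once.
builder_lock = open(lockfile, 'r')
fcntl.flock(builder_lock, fcntl.LOCK_SH | fcntl.LOCK_NB)

# Hypothetical maintenance side: exclusive, blocks until every shared holder
# has released the lock.
# admin_lock = open(lockfile, 'r+')
# fcntl.flock(admin_lock, fcntl.LOCK_EX)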
Code example #54
File: splitting.py  Project: leeming/ybd
def install_split_artifacts(defs, component, stratum, artifacts):
    '''Create the .meta files for a split stratum

    Given a stratum and a list of artifacts to split, writes new .meta files to
    the baserock dir in the 'sandbox' dir of the component and copies the files
    from the .unpacked directory of each individual chunk to the sandbox

    '''
    if os.path.exists(os.path.join(component['sandbox'], 'baserock',
                                   stratum['name'] + '.meta')):
        return

    if artifacts is None:
        artifacts = []
        default_artifacts = defs.defaults.get_split_rules('stratum')
        for split in config.get('default-splits', []):
            artifacts += [stratum['name'] + split]

    log(component, 'Installing %s splits' % stratum['name'], artifacts)
    stratum_metadata = get_metadata(defs, stratum)
    split_stratum_metadata = {}
    split_stratum_metadata['products'] = []
    components = []
    for product in stratum_metadata['products']:
        for artifact in artifacts:
            if artifact == product['artifact']:
                components += product['components']
                split_stratum_metadata['products'].append(product)

    log(component, 'Splitting artifacts:', artifacts, verbose=True)
    log(component, 'Splitting components:', components, verbose=True)

    baserockpath = os.path.join(component['sandbox'], 'baserock')
    if not os.path.isdir(baserockpath):
        os.mkdir(baserockpath)
    split_stratum_metafile = os.path.join(baserockpath,
                                          stratum['name'] + '.meta')
    with open(split_stratum_metafile, "w") as f:
        yaml.safe_dump(split_stratum_metadata, f, default_flow_style=False)

    for path in stratum['contents']:
        chunk = defs.get(path)
        if chunk.get('build-mode', 'staging') == 'bootstrap':
            continue

        if not get_cache(defs, chunk):
            exit(stratum, 'ERROR: artifact not found', chunk.get('name'))

        try:
            metafile = path_to_metafile(defs, chunk)
            with open(metafile, "r") as f:
                filelist = []
                metadata = yaml.safe_load(f)
                split_metadata = {'ref': metadata.get('ref'),
                                  'repo': metadata.get('repo'),
                                  'products': []}
                if config.get('artifact-version', 0) not in [0, 1]:
                    metadata['cache'] = component.get('cache')

                for product in metadata['products']:
                    if product['artifact'] in components:
                        filelist += product.get('components', [])
                        # handle old artifacts still containing 'files'
                        filelist += product.get('files', [])

                        split_metadata['products'].append(product)

                if split_metadata['products'] != []:
                    split_metafile = os.path.join(baserockpath,
                                                  os.path.basename(metafile))
                    with open(split_metafile, "w") as f:
                        yaml.safe_dump(split_metadata, f,
                                       default_flow_style=False)

                    cachepath, cachedir = os.path.split(get_cache(defs, chunk))
                    path = os.path.join(cachepath, cachedir + '.unpacked')
                    utils.copy_file_list(path, component['sandbox'], filelist)
        except:
            # if we got here, something has gone badly wrong parsing metadata
            # or copying files into the sandbox...
            log(stratum, 'WARNING: failed copying files from', metafile)
            log(stratum, 'WARNING: copying *all* files')
            utils.copy_all_files(path, component['sandbox'])
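
The .meta files filtered here are YAML with a 'products' list; each product names an artifact and the component files it owns. A hedged example of what yaml.safe_load() of a chunk metafile might return (field names are those the loop above reads, values are invented):

# Hypothetical chunk metadata; only 'ref', 'repo' and 'products' matter here.
metadata = {
    'repo': 'upstream:glibc',
    'ref': '2.21',
    'products': [
        {'artifact': 'glibc-bins',
         'components': ['sbin/ldconfig', 'lib/libc.so.6']},
        {'artifact': 'glibc-devel',
         'components': ['usr/include/stdio.h']},
    ],
}
# The loop keeps only products whose 'artifact' is in the wanted split list
# and collects their 'components' (or legacy 'files') as the paths to copy
# into the sandbox.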
Code example #55
File: wrangler.py  Project: mwilliams-ct/ybd
def wrangle_recipes(source, output):
    app.exit('WRANGLER', 'ERROR: bitbake recipes in', source)
Code example #56
File: repos.py  Project: mwilliams-ct/ybd
def update_mirror(name, repo, gitdir):
    with app.chdir(gitdir), open(os.devnull, "w") as fnull:
        app.log(name, 'Refreshing mirror for %s' % repo)
        if call(['git', 'remote', 'update', 'origin'], stdout=fnull,
                stderr=fnull):
            app.exit(name, 'ERROR: git update mirror failed', repo)
Code example #57
File: sandbox.py  Project: mwilliams-ct/ybd
def run_sandboxed(this, command, env=None, allow_parallel=False):
    global executor

    app.log(this, 'Running command:\n%s' % command)
    with open(this['log'], "a") as logfile:
        logfile.write("# # %s\n" % command)

    mounts = ccache_mounts(this, ccache_target=env['CCACHE_DIR'])

    if this.get('build-mode') == 'bootstrap':
        # bootstrap mode: builds have some access to the host system, so they
        # can use the compilers etc.
        tmpdir = app.config.get("TMPDIR", "/tmp")

        writable_paths = [
            this['build'],
            this['install'],
            tmpdir,
        ]

        config = dict(
            cwd=this['build'],
            filesystem_root='/',
            filesystem_writable_paths=writable_paths,
            mounts='isolated',
            extra_mounts=[],
            network='isolated',
        )
    else:
        # normal mode: builds run in a chroot with only their dependencies
        # present.

        mounts.extend([
            (None, '/dev/shm', 'tmpfs'),
            (None, '/proc', 'proc'),
        ])

        if this.get('kind') == 'system':
            writable_paths = 'all'
        else:
            writable_paths = [
                this['name'] + '.build',
                this['name'] + '.inst',
                '/dev',
                '/proc',
                '/tmp',
            ]

        config = dict(
            cwd=this['name'] + '.build',
            filesystem_root=this['sandbox'],
            filesystem_writable_paths=writable_paths,
            mounts='isolated',
            extra_mounts=mounts,
            network='isolated',
        )

    argv = ['sh', '-c', command]

    cur_makeflags = env.get("MAKEFLAGS")

    # Adjust config for what the backend is capable of (warn=False below, so
    # any capability downgrades are applied without a warning).
    config = executor.degrade_config_for_capabilities(config, warn=False)

    try:
        if not allow_parallel:
            env.pop("MAKEFLAGS", None)

        app.log_env(this['log'], env, argv_to_string(argv))

        with open(this['log'], "a") as logfile:
            exit_code = executor.run_sandbox_with_redirection(
                argv,
                stdout=logfile,
                stderr=sandboxlib.STDOUT,
                env=env,
                **config)

        if exit_code != 0:
            app.log(this,
                    'ERROR: command failed in directory %s:\n\n' % os.getcwd(),
                    argv_to_string(argv))
            call(['tail', '-n', '200', this['log']])
            app.log(this, 'ERROR: log file is at', this['log'])
            app.exit(this, 'ERROR: sandbox debris is at', this['sandbox'])
    finally:
        if cur_makeflags is not None:
            env['MAKEFLAGS'] = cur_makeflags
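
Finally, a hedged sketch of how run_sandboxed() might be called for one chunk; every key shown is one the function actually reads, but the concrete paths, names and environment are invented:

# Hypothetical chunk description and environment.
this = {
    'name': 'busybox',
    'kind': 'chunk',
    'build-mode': 'staging',   # anything except 'bootstrap' -> isolated chroot
    'sandbox': '/src/tmp/busybox.sandbox',
    'build': '/src/tmp/busybox.sandbox/busybox.build',
    'install': '/src/tmp/busybox.sandbox/busybox.inst',
    'log': '/src/tmp/busybox.build.log',
}
env = {'PATH': '/usr/bin:/bin',
       'CCACHE_DIR': '/src/ccache',
       'MAKEFLAGS': '-j4'}

# MAKEFLAGS is stripped for the duration of the call unless allow_parallel
# is True, so this build runs single-threaded.
run_sandboxed(this, 'make && make install', env=env, allow_parallel=False)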