Example #1
    def generateTestDirList(self, builddir, dirname, source_files, component, immediate_dependencies, toplevel=False, module_is_empty=False):
        logger.debug('generate CMakeLists.txt for directory: %s' % os.path.join(component.path, dirname))

        link_dependencies = [x for x in immediate_dependencies]
        fname = os.path.join(builddir, dirname, 'CMakeLists.txt')

        # group the list of source files by subdirectory: generate one test for
        # each subdirectory, and one test for each file at the top level
        subdirs = defaultdict(list)
        toplevel_srcs = []
        for f in source_files:
            if f.lang in ('c', 'cpp', 'objc', 's'):
                subrelpath = os.path.relpath(f.relpath, dirname)
                subdir = fsutils.fullySplitPath(subrelpath)[0]
                if subdir and subdir != subrelpath:
                    subdirs[subdir].append(f)
                else:
                    toplevel_srcs.append(f)

        tests = []
        for f in toplevel_srcs:
            object_name = '%s-test-%s' % (
                component.getName(), os.path.basename(os.path.splitext(str(f))[0]).lower()
            )
            tests.append([[str(f)], object_name, [f.lang]])
        for subdirname, sources in sorted(subdirs.items(), key=lambda x: x[0]):
            object_name = '%s-test-%s' % (
                component.getName(), fsutils.fullySplitPath(subdirname)[0].lower()
            )
            tests.append([[str(f) for f in sources], object_name, [f.lang for f in sources]])

        # link tests against the main executable
        if not module_is_empty:
            link_dependencies.append(component.getName())

        # Find cmake files
        cmake_files = []
        for root, dirs, files in os.walk(os.path.join(component.path, dirname)):
            for f in files:
                name, ext = os.path.splitext(f)
                if ext.lower() == '.cmake':
                    cmake_files.append(os.path.join(root, f))

        test_template = jinja_environment.get_template('test_CMakeLists.txt')

        file_contents = test_template.render({ #pylint: disable=no-member
             'source_directory': os.path.join(component.path, dirname),
                        'tests': tests,
            'link_dependencies': link_dependencies,
                  'cmake_files': cmake_files,
             'exclude_from_all': (not toplevel),
            'test_dependencies': [x[1] for x in immediate_dependencies.items() if x[1].isTestDependency()]
        })

        self._writeFile(fname, file_contents)
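The examples above and below depend on a fsutils.fullySplitPath helper that is not shown. A minimal sketch of what it presumably does, splitting a path into the full list of its components (this is a hypothetical reconstruction; the real yotta implementation may differ):

import os

def fullySplitPath(path):
    # hypothetical reconstruction: 'a/b/c' -> ['a', 'b', 'c'], and an
    # absolute path keeps its root, so '/a/b' -> ['/', 'a', 'b']
    components = []
    while True:
        head, tail = os.path.split(path)
        if head == path:
            # the root of an absolute path splits to itself
            if head:
                components.insert(0, head)
            break
        if tail:
            components.insert(0, tail)
        path = head
    return components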
Example #2
def unpackFrom(tar_file_path, to_directory):
    # first unpack into a sibling directory of the specified directory, and
    # then move it into place.

    # we expect our tarballs to contain a single top-level directory. We strip
    # off this name as we extract to minimise the path length

    into_parent_dir = os.path.dirname(to_directory)
    fsutils.mkDirP(into_parent_dir)
    temp_directory = tempfile.mkdtemp(dir=into_parent_dir)

    with tarfile.open(tar_file_path) as tf:
        strip_dirname = ''
        # get the extraction directory name from the first part of the
        # extraction paths: it should be the same for all members of
        # the archive
        for m in tf.getmembers():
            split_path = fsutils.fullySplitPath(m.name)
            logger.debug('process member: %s %s', m.name, split_path)
            if os.path.isabs(m.name) or '..' in split_path:
                raise ValueError('archive uses invalid paths')
            if not strip_dirname:
                if len(split_path) != 1 or not len(split_path[0]):
                    raise ValueError('archive does not appear to contain a single module')
                strip_dirname = split_path[0]
                continue
            else:
                if split_path[0] != strip_dirname:
                    raise ValueError('archive does not appear to contain a single module')
            m.name = os.path.join(*split_path[1:])
            tf.extract(m, path=temp_directory)
    shutil.move(temp_directory, to_directory)
    logger.debug('extraction complete %s', to_directory)
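A hedged round-trip sketch of how unpackFrom behaves, run in the same module context (all file and directory names here are illustrative only): the single top-level directory inside the archive is stripped during extraction, and to_directory only appears once extraction has fully succeeded.

import os
import tarfile
import tempfile

work = tempfile.mkdtemp()
os.makedirs(os.path.join(work, 'foo'))
with open(os.path.join(work, 'foo', 'hello.txt'), 'w') as f:
    f.write('hi')
archive = os.path.join(work, 'foo.tar.gz')
with tarfile.open(archive, 'w:gz') as tf:
    tf.add(os.path.join(work, 'foo'), arcname='foo')

unpackFrom(archive, os.path.join(work, 'unpacked', 'foo'))
# the 'foo' wrapper directory was stripped as the members were extracted:
assert os.path.isfile(os.path.join(work, 'unpacked', 'foo', 'hello.txt'))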
Example #3
    def generateTestDirList(self, builddir, dirname, source_files, component,
                            immediate_dependencies):
        logger.debug('generate CMakeLists.txt for directory: %s' %
                     os.path.join(component.path, dirname))

        link_dependencies = [x for x in immediate_dependencies]
        fname = os.path.join(builddir, dirname, 'CMakeLists.txt')

        # group the list of source files by subdirectory: generate one test for
        # each subdirectory, and one test for each file at the top level
        subdirs = defaultdict(list)
        toplevel_srcs = []
        for f in source_files:
            if f.lang in ('c', 'cpp', 'objc'):
                subrelpath = os.path.relpath(f.relpath, dirname)
                subdir = os.path.split(subrelpath)[0]
                if subdir:
                    subdirs[subdir].append(f)
                else:
                    toplevel_srcs.append(f)

        tests = []
        for f in toplevel_srcs:
            object_name = '%s-test-%s' % (
                component.getName(),
                os.path.basename(os.path.splitext(str(f))[0]).lower())
            tests.append([[str(f)], object_name, [f.lang]])
        for subdirname, sources in subdirs.items():
            object_name = '%s-test-%s' % (
                component.getName(), fsutils.fullySplitPath(subdirname)[0].lower())
            tests.append([[str(f) for f in sources], object_name,
                          [f.lang for f in sources]])

        # link tests against the main executable
        link_dependencies.append(component.getName())

        # Find cmake files
        cmake_files = []
        for root, dirs, files in os.walk(os.path.join(component.path, dirname)):
            for f in files:
                name, ext = os.path.splitext(f)
                if ext.lower() == '.cmake':
                    cmake_files.append(os.path.join(root, f))

        test_template = jinja_environment.get_template('test_CMakeLists.txt')

        file_contents = test_template.render({
            'source_directory': os.path.join(component.path, dirname),
            'tests': tests,
            'link_dependencies': link_dependencies,
            'cmake_files': cmake_files
        })

        self._writeFile(fname, file_contents)
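For reference, each entry in the tests list handed to the Jinja template is a [source_files, object_name, languages] triple. For a hypothetical component named foo with test sources bar.c and baz/{a.c, b.c}, it would look roughly like:

tests = [
    [['test/bar.c'],                   'foo-test-bar', ['c']],
    [['test/baz/a.c', 'test/baz/b.c'], 'foo-test-baz', ['c', 'c']],
]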
Example #4
def unpackTarballStream(stream, into_directory, hash=(None, None)):
    ''' Unpack a stream-like object that contains a tarball into a directory
    '''
    hash_name = hash[0]
    hash_value = hash[1]

    if hash_name:
        m = getattr(hashlib, hash_name)()

    into_parent_dir = os.path.dirname(into_directory)
    fsutils.mkDirP(into_parent_dir)
    temp_directory = tempfile.mkdtemp(dir=into_parent_dir)
    download_fname = os.path.join(temp_directory, 'download.tar.gz')
    # remove any partially downloaded file: TODO: checksumming & caching of
    # downloaded components in some central place
    fsutils.rmF(download_fname)
    # create the archive exclusively, we don't want someone else maliciously
    # overwriting our tar archive with something that unpacks to an absolute
    # path when we might be running sudo'd
    try:
        fd = os.open(
            download_fname,
            os.O_CREAT | os.O_EXCL | os.O_RDWR | getattr(os, "O_BINARY", 0))
        with os.fdopen(fd, 'rb+') as f:
            f.seek(0)

            for chunk in stream.iter_content(1024):
                f.write(chunk)
                if hash_name:
                    m.update(chunk)

            if hash_name:
                calculated_hash = m.hexdigest()
                logging.debug('calculated hash: %s check against: %s' %
                              (calculated_hash, hash_value))
                if hash_value and (hash_value != calculated_hash):
                    raise Exception('Hash verification failed.')
            f.truncate()
            logging.debug('got file, extract into %s (for %s)', temp_directory,
                          into_directory)
            # head back to the start of the file and untar (without closing the
            # file)
            f.seek(0)
            f.flush()
            os.fsync(f)
            with tarfile.open(fileobj=f) as tf:
                to_extract = []
                # modify members to change where they extract to!
                for m in tf.getmembers():
                    split_path = fsutils.fullySplitPath(m.name)
                    if len(split_path) > 1:
                        m.name = os.path.join(*(split_path[1:]))
                        to_extract.append(m)
                tf.extractall(path=temp_directory, members=to_extract)

        # remove the temporary download file, which at this point still lives
        # inside temp_directory (removing it from into_directory before the
        # move would leave the fresh download behind): maybe in the future we
        # will cache these somewhere
        fsutils.rmF(download_fname)

        # move the directory we extracted stuff into to where we actually want it
        # to be
        fsutils.rmRf(into_directory)
        shutil.move(temp_directory, into_directory)

    finally:
        fsutils.rmRf(temp_directory)

    logging.debug('extraction complete %s', into_directory)
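The stream argument is expected to behave like a requests response opened with stream=True (it only needs an iter_content() method). A hedged usage sketch with a placeholder URL: the default hash=(None, None) skips verification, while ('sha256', digest) would check the download against a known sha256 digest.

import requests

# hypothetical URL, for illustration only
response = requests.get('https://example.com/some-module-1.0.0.tar.gz',
                        stream=True)
unpackTarballStream(response, '/tmp/yotta_modules/some-module')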
Example #5
def unpackTarballStream(stream, into_directory, hash=(None, None)):
    ''' Unpack a requests response stream that contains a tarball into a
        directory
    '''
    hash_name = hash[0]
    hash_value = hash[1]

    if hash_name:
        m = getattr(hashlib, hash_name)()

    into_parent_dir = os.path.dirname(into_directory)
    fsutils.mkDirP(into_parent_dir)
    temp_directory = tempfile.mkdtemp(dir=into_parent_dir)
    download_fname = os.path.join(temp_directory, 'download.tar.gz')
    # remove any partially downloaded file: TODO: checksumming & caching of
    # downloaded components in some central place
    fsutils.rmF(download_fname)
    # create the archive exclusively, we don't want someone else maliciously
    # overwriting our tar archive with something that unpacks to an absolute
    # path when we might be running sudo'd
    try:
        with _openExclusively(download_fname) as f:
            f.seek(0)
            for chunk in stream.iter_content(1024):
                f.write(chunk)
                if hash_name:
                    m.update(chunk)

            if hash_name:
                calculated_hash = m.hexdigest()
                logger.debug(
                    'calculated %s hash: %s check against: %s' % (
                        hash_name, calculated_hash, hash_value
                    )
                )
                if hash_value and (hash_value != calculated_hash):
                    raise Exception('Hash verification failed.')
            logger.debug('wrote tarfile of size: %s to %s', f.tell(), download_fname)
            f.truncate()
            logger.debug(
                'got file, extract into %s (for %s)', temp_directory, into_directory
            )
            # head back to the start of the file and untar (without closing the
            # file)
            f.seek(0)
            f.flush()
            os.fsync(f)
            with tarfile.open(fileobj=f) as tf:
                extracted_dirname = ''
                # get the extraction directory name from the first part of the
                # extraction paths: it should be the same for all members of
                # the archive
                for m in tf.getmembers():
                    split_path = fsutils.fullySplitPath(m.name)
                    if len(split_path) > 1:
                        if extracted_dirname:
                            if split_path[0] != extracted_dirname:
                                raise ValueError('archive does not appear to contain a single module')
                        else:
                            extracted_dirname = split_path[0]
                tf.extractall(path=temp_directory)

        # move the directory we extracted stuff into to where we actually want it
        # to be
        fsutils.rmRf(into_directory)
        shutil.move(os.path.join(temp_directory, extracted_dirname), into_directory)

    finally:
        fsutils.rmF(download_fname)
        fsutils.rmRf(temp_directory)

    logger.debug('extraction complete %s', into_directory)
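Example #4 and Example #5 differ mainly in that Example #5 hides the exclusive-create open behind an _openExclusively helper that is not shown. A plausible reconstruction based on the flags used in Example #4 (hypothetical; the real helper may differ):

import os

def _openExclusively(name):
    # O_EXCL makes the open fail if the file already exists, so a
    # pre-existing (possibly attacker-controlled) file is never reused;
    # O_BINARY exists only on Windows, hence the getattr default of 0
    fd = os.open(name, os.O_CREAT | os.O_EXCL | os.O_RDWR | getattr(os, 'O_BINARY', 0))
    return os.fdopen(fd, 'rb+')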