Exemple #1
0
    def _tar_config_files(self, tarfile):
        import glob
        import os

        for config_file in glob.glob('*.config.yaml'):
            tarfile.add(config_file,
                        arcname=os.path.join(self._job_name, config_file))
Exemple #2
0
    def createArchive(self, cfgOutName, name='', mode='w:gz'):
        """Create the tarball to upload and return its path and checksum.

        Args:
            cfgOutName: Path of the configuration file to archive; it is
                stored inside the tarball as ``PSet.py``.
            name: Destination archive path; when empty, a random file name
                ending in ``default.tgz`` is created in the current directory.
            mode: Mode passed to ``tarfile.open`` (gzip write by default).

        Returns:
            Tuple ``(name, checksum)`` where ``checksum`` is the SHA-256 hex
            digest of the member listing (name, size, mtime, uname).
        """
        import hashlib
        import os
        import tarfile

        if not name:
            import uuid
            name = os.path.join(os.getcwd(), str(uuid.uuid4()) + 'default.tgz')

        print('opening tar file')
        # Use a distinct local name so the ``tarfile`` module is not shadowed,
        # and a context manager so the archive is closed even on error.
        with tarfile.open(name=name, mode=mode, dereference=True) as tarball:
            print('adding %s to the tarball' % cfgOutName)
            tarball.add(cfgOutName, arcname='PSet.py')

            print('calculating the checksum')
            lsl = [(x.name, int(x.size), int(x.mtime), x.uname)
                   for x in tarball.getmembers()]
            # Encode explicitly: sha256 requires bytes on Python 3.
            checksum = hashlib.sha256(str(lsl).encode('utf-8')).hexdigest()

        return name, checksum
Exemple #3
0
    def _tar_resources(self, tarfile):
        import os

        module_directory = os.path.dirname(os.path.abspath(__file__))
        resource_directory = os.path.join(module_directory, "resources")
        os.chdir(resource_directory)
        tarfile.add(".", arcname=self._job_name)
Exemple #4
0
 def createArchive(self, cfgOutName, name='', mode='w:gz'):
     """Create the upload tarball and return ``(path, checksum)``.

     ``cfgOutName`` is stored inside the archive as ``PSet.py``. When
     ``name`` is empty, a random ``...default.tgz`` path in the current
     directory is used. The checksum is the SHA-256 hex digest of the
     member listing (name, size, mtime, uname).
     """
     import hashlib
     import os
     import tarfile

     if not name:
         import uuid
         name = os.path.join(os.getcwd(), str(uuid.uuid4()) + 'default.tgz')

     print('opening tar file')
     # A distinct local name keeps the ``tarfile`` module usable, and the
     # context manager guarantees the archive is closed.
     with tarfile.open(name=name, mode=mode, dereference=True) as tarball:
         print('adding %s to the tarball' % cfgOutName)
         tarball.add(cfgOutName, arcname='PSet.py')

         print('calculating the checksum')
         lsl = [(x.name, int(x.size), int(x.mtime), x.uname)
                for x in tarball.getmembers()]
         # Encode explicitly: sha256 requires bytes on Python 3.
         checksum = hashlib.sha256(str(lsl).encode('utf-8')).hexdigest()

     return name, checksum
Exemple #5
0
 def pack_into(self, tarfile):
     """Add every C header (``*.h``) under the source tree to ``tarfile``.

     Files keep their layout relative to the source directory and are
     stored under the target directory.
     """
     for dirpath, _, filenames in os.walk(self.__source_dir):
         rel_dir = os.path.normpath(os.path.relpath(dirpath, self.__source_dir))
         for filename in filenames:
             # The original regex ``.*\.h$`` is just a suffix test, and the
             # ``not ... is None`` double-negative obscured it.
             if filename.endswith('.h'):
                 full_src = os.path.join(self.__source_dir, rel_dir, filename)
                 full_tgt = os.path.join(self.__target_dir, rel_dir, filename)
                 tarfile.add(name=full_src, arcname=full_tgt)
Exemple #6
0
def recursively_add_directory(tarfile, directory_path, path_in_archive, exclude=None):
    """Walk ``directory_path`` and add each file to ``tarfile`` under ``path_in_archive``.

    ``exclude`` is an optional predicate on the file's basename; files for
    which it returns true are skipped.
    """
    prefix_len = len(directory_path)
    for sub_path, dirs, files in os.walk(directory_path):
        assert sub_path[:prefix_len] == directory_path
        relative_dir = sub_path[prefix_len:]
        # Drop the single leading separator left over from the prefix strip.
        if relative_dir[:1] and relative_dir[0] in (os.path.sep, os.path.altsep):
            relative_dir = relative_dir[1:]
        for fname in files:
            if exclude is not None and exclude(fname):
                continue
            tarfile.add(path.join(sub_path, fname), path.join(path_in_archive, relative_dir, fname))
Exemple #7
0
def recursively_add_directory(tarfile, directory_path, path_in_archive, exclude=None):
    """Recursively add the files below ``directory_path`` into ``tarfile``.

    Each file is stored under ``path_in_archive`` with its path relative to
    ``directory_path``. ``exclude(fname)`` returning true skips that file.
    """
    seps = (os.path.sep, os.path.altsep)
    for sub_path, _dirs, files in os.walk(directory_path):
        assert sub_path[: len(directory_path)] == directory_path
        rel = sub_path[len(directory_path) :]
        if rel and rel[0] in seps:
            rel = rel[1:]
        wanted = [f for f in files if exclude is None or not exclude(f)]
        for fname in wanted:
            tarfile.add(path.join(sub_path, fname), path.join(path_in_archive, rel, fname))
def add_meta_files(tarfile, meta_files_dir):
    """Add the TIMESTAMP and SCHEMA_SEQUENCE meta files to ``tarfile``.

    Args:
        tarfile: Open tar archive the files are appended to.
        meta_files_dir: Directory that contains the meta files.
    """
    for meta_name in ('TIMESTAMP', 'SCHEMA_SEQUENCE'):
        tarfile.add(os.path.join(meta_files_dir, meta_name), arcname=meta_name)
Exemple #9
0
    def _tar_env(self, tarfile):
        """Write the job's script environment to a temp file and add it to the tarball as ``run.env``."""
        import os
        from foundations_contrib.simple_tempfile import SimpleTempfile
        from foundations_contrib.job_bundling.script_environment import ScriptEnvironment

        archive_name = os.path.join(self._job_name, 'run.env')
        with SimpleTempfile('w+') as temp_file:
            ScriptEnvironment(self._config).write_environment(temp_file)
            tarfile.add(temp_file.name, arcname=archive_name)
Exemple #10
0
    def _tar_foundations_modules(self, tarfile):
        """Bundle every registered foundations module directory into the tarball under the job name."""
        from foundations_internal.global_state import module_manager
        import os

        for name, directory in module_manager.module_directories_and_names():
            self._log().debug('Adding module {} at {}'.format(name, directory))
            archive_name = self._job_name + os.path.sep + name
            tarfile.add(directory, arcname=archive_name)
Exemple #11
0
    def bk_now(self, tables=None):
        """Logical backup of the configured user's tables via ``exp``.

        (Translated from the original Chinese docstring.) Exports the user's
        tables with a logical dump; when ``tables`` is None, the whole user
        schema is exported.

        Args:
            tables: Table selection appended to the ``exp`` options.
                NOTE(review): the original docstring called this a tuple, but
                it is concatenated to a string below — confirm callers pass a
                string.

        Returns:
            ``(True, archive_path)`` on success, ``(False, None)`` on failure.
        """
        import tarfile

        timestamp = str(
            time.strftime('%Y-%m-%d-%H_%M_%S', time.localtime(time.time())))

        if utils.is_windows():
            bak_dir = self.bkdir + '\\' + timestamp
            dmp_file = bak_dir + '\\' + timestamp + '_exp.dmp'
            log_file = bak_dir + '\\' + timestamp + '_exp.log'
            # NOTE(review): credentials on the command line are visible in
            # the process list; a parameter file would be safer.
            options = self.conf['seruser'] + '/' + self.conf['serpass'] + ' buffer=64000 file=' \
             + dmp_file + ' log=' + log_file
            if tables is not None:
                options += ' tables=' + tables
            else:
                options += ' owner=' + self.conf['seruser']
            os.mkdir(bak_dir)
            if os.system('exp ' + options) == 0:
                zip_file = bak_dir + '\\' + timestamp + '_exp.zip'
                myzipfile = zipfile.ZipFile(zip_file, 'w')
                myzipfile.write(dmp_file)
                myzipfile.close()
                return True, zip_file
            else:
                print('backup oprations failed!')
                return False, None
        elif utils.is_linux():
            bak_dir = self.bkdir + '/' + timestamp
            dmp_file = bak_dir + '/' + timestamp + '_exp.dmp'
            log_file = bak_dir + '/' + timestamp + '_exp.log'
            options = self.conf['seruser'] + '/' + self.conf['serpass'] + ' buffer=64000 file=' \
             + dmp_file + ' log=' + log_file
            if tables is not None:
                options += ' tables=' + tables
            else:
                options += ' owner=' + self.conf['seruser']
            os.mkdir(bak_dir)
            if os.system('exp ' + options) == 0:
                tar_file = bak_dir + '/' + timestamp + '_exp.tar.gz'
                # Bind to a fresh name: the original ``tarfile = tarfile.open(...)``
                # made ``tarfile`` an unbound local and crashed here.
                with tarfile.open(tar_file, 'w:gz') as tarball:
                    tarball.add(dmp_file)
                return True, tar_file
            else:
                # The original fell through returning bare None on failure;
                # report it consistently with the Windows branch.
                print('backup oprations failed!')
                return False, None
        else:
            print('Unkown System Type!')
            sys.exit(1)
Exemple #12
0
	def bk_now(self, tables=None):
		"""Logical backup of the configured user's tables via ``exp``.

		(Translated from the original Chinese docstring.) Exports the user's
		tables with a logical dump; when ``tables`` is None, the whole user
		schema is exported.

		Returns:
			``(True, archive_path)`` on success, ``(False, None)`` on failure.
		"""
		import tarfile

		timestamp = str(time.strftime('%Y-%m-%d-%H_%M_%S', time.localtime(time.time())))

		if utils.is_windows():
			bak_dir = self.bkdir + '\\' + timestamp
			dmp_file = bak_dir + '\\' + timestamp + '_exp.dmp'
			log_file = bak_dir + '\\' + timestamp + '_exp.log'
			# NOTE(review): credentials on the command line are visible in the
			# process list; a parameter file would be safer.
			options = self.conf['seruser'] + '/' + self.conf['serpass'] + ' buffer=64000 file=' \
				+ dmp_file + ' log=' + log_file
			if tables is not None:
				options += ' tables=' + tables
			else:
				options += ' owner=' + self.conf['seruser']
			os.mkdir(bak_dir)
			if os.system('exp ' + options) == 0:
				zip_file = bak_dir + '\\' + timestamp + '_exp.zip'
				myzipfile = zipfile.ZipFile(zip_file, 'w')
				myzipfile.write(dmp_file)
				myzipfile.close()
				return True, zip_file
			else:
				print('backup oprations failed!')
				return False, None
		elif utils.is_linux():
			bak_dir = self.bkdir + '/' + timestamp
			dmp_file = bak_dir + '/' + timestamp + '_exp.dmp'
			log_file = bak_dir + '/' + timestamp + '_exp.log'
			options = self.conf['seruser'] + '/' + self.conf['serpass'] + ' buffer=64000 file=' \
				+ dmp_file + ' log=' + log_file
			if tables is not None:
				options += ' tables=' + tables
			else:
				options += ' owner=' + self.conf['seruser']
			os.mkdir(bak_dir)
			if os.system('exp ' + options) == 0:
				tar_file = bak_dir + '/' + timestamp + '_exp.tar.gz'
				# Fresh name: the original ``tarfile = tarfile.open(...)``
				# made ``tarfile`` an unbound local and crashed here.
				with tarfile.open(tar_file, 'w:gz') as tarball:
					tarball.add(dmp_file)
				return True, tar_file
			else:
				# The original silently returned None on failure; report it
				# consistently with the Windows branch.
				print('backup oprations failed!')
				return False, None
		else:
			print('Unkown System Type!')
			sys.exit(1)
Exemple #13
0
    def glob_bak(self):
        """Full (global) MySQL backup via ``mysqldump``.

        Dumps all databases, then compresses the dump: a zip file on
        Windows, a gzipped tar on Linux.

        Returns:
            ``(True, archive_path)`` on success, ``(False, None)`` on failure.
        """
        import tarfile

        timestamp = str(
            time.strftime('%Y-%m-%d-%H_%M_%S', time.localtime(time.time())))

        if utils.is_windows():
            dump_dir = self.bkdir + '\\' + timestamp
            dump_file = dump_dir + '\\' + timestamp + '_glob.sql'
            zip_file_path = dump_dir + '\\' + timestamp + '_glob.zip'
            log_file = dump_dir + '\\' + timestamp + '_glob.log'
            # NOTE(review): the password on the command line is visible in the
            # process list; prefer an option file.
            options = '-h' + self.conf['serip'] + ' -u' + self.conf['seruser'] \
              + ' -p' + self.conf['serpass'] + ' --all-databases --flush-logs --master-data=2'

            os.mkdir(dump_dir)
            os.system('purge master logs')
            if os.system('mysqldump ' + options + ' > ' + dump_file) == 0:
                myzipfile = zipfile.ZipFile(zip_file_path, 'w')
                myzipfile.write(dump_file)
                myzipfile.close()
                os.system('echo "DataBase Backup Success!" >> ' + log_file)
                return True, zip_file_path
            else:
                # Closing quote restored: the original echo string left the
                # shell with an unterminated quote.
                os.system('echo "DataBase Backup Failed!" >> ' + log_file)
                return False, None
        elif utils.is_linux():
            dump_dir = self.bkdir + '/' + timestamp
            dump_file = dump_dir + '/' + timestamp + '_glob.sql'
            tar_file = dump_dir + '/' + timestamp + '_glob.tar.gz'
            log_file = dump_dir + '/' + timestamp + '_glob.log'
            options = '-h' + self.conf['serip'] + ' -u' + self.conf['seruser'] \
              + ' -p' + self.conf['serpass'] + ' --all-databases --flush-logs --master-data=2'

            os.mkdir(dump_dir)
            os.system('purge master logs')
            if os.system('mysqldump ' + options + ' > ' + dump_file) == 0:
                # Fresh name: assigning to ``tarfile`` made the module an
                # unbound local in the original and crashed here.
                with tarfile.open(tar_file, 'w:gz') as tarball:
                    tarball.add(dump_file)
                os.system('echo "DataBase Backup Success!" >> ' + log_file)
                return True, tar_file
            else:
                os.system('echo "DataBase Backup Failed!" >> ' + log_file)
                return False, None
        else:
            print('Unkown System Type!')
            sys.exit(1)
Exemple #14
0
    def incr_bak(self):
        """Incremental MySQL backup: flush logs and archive the newest binlog.

        Returns:
            ``(True, archive_path)`` on success, ``(False, None)`` on failure.
        """
        import tarfile

        timestamp = str(
            time.strftime('%Y-%m-%d-%H_%M_%S', time.localtime(time.time())))

        if utils.is_windows():
            dump_dir = self.bkdir + '\\' + timestamp
            zip_file_path = dump_dir + '\\' + timestamp + '_incr.zip'
            log_file = dump_dir + '\\' + timestamp + '_incr.log'
            # NOTE(review): the password on the command line is visible in the
            # process list; prefer an option file.
            options = '-h' + self.conf['serip'] + ' -u' + self.conf['seruser'] \
              + ' -p' + self.conf['serpass'] + ' flush-logs'

            os.mkdir(dump_dir)
            if os.system('mysqladmin ' + options) == 0:
                logbin_file = self._get_logbin_last()
                print(logbin_file)
                myzipfile = zipfile.ZipFile(zip_file_path, 'w')
                myzipfile.write(logbin_file)
                myzipfile.close()
                os.system('echo "DataBase Backup Success!" >> ' + log_file)
                return True, zip_file_path
            else:
                # Closing quote restored: the original echo string left the
                # shell with an unterminated quote.
                os.system('echo "DataBase Backup Failed!" >> ' + log_file)
                return False, None
        elif utils.is_linux():
            dump_dir = self.bkdir + '/' + timestamp
            tar_file = dump_dir + '/' + timestamp + '_incr.tar.gz'
            log_file = dump_dir + '/' + timestamp + '_incr.log'
            options = '-h' + self.conf['serip'] + ' -u' + self.conf['seruser'] \
              + ' -p' + self.conf['serpass'] + ' flush-logs'

            os.mkdir(dump_dir)
            if os.system('mysqladmin ' + options) == 0:
                logbin_file = self._get_logbin_last()
                # Fresh name: assigning to ``tarfile`` made the module an
                # unbound local in the original and crashed here.
                with tarfile.open(tar_file, 'w:gz') as tarball:
                    tarball.add(logbin_file)
                os.system('echo "DataBase Backup Success!" >> ' + log_file)
                return True, tar_file
            else:
                os.system('echo "DataBase Backup Failed!" >> ' + log_file)
                return False, None
        else:
            print('Unkown System Type!')
            sys.exit(1)
Exemple #15
0
	def incr_bak(self):
		"""Incremental MySQL backup: flush logs and archive the newest binlog.

		Returns:
			``(True, archive_path)`` on success, ``(False, None)`` on failure.
		"""
		import tarfile

		timestamp = str(time.strftime('%Y-%m-%d-%H_%M_%S', time.localtime(time.time())))

		if utils.is_windows():
			dump_dir = self.bkdir + '\\' + timestamp
			zip_file_path = dump_dir + '\\' + timestamp + '_incr.zip'
			log_file = dump_dir + '\\' + timestamp + '_incr.log'
			# NOTE(review): the password on the command line is visible in the
			# process list; prefer an option file.
			options = '-h' + self.conf['serip'] + ' -u' + self.conf['seruser'] \
					+ ' -p' + self.conf['serpass'] + ' flush-logs'

			os.mkdir(dump_dir)
			if os.system('mysqladmin ' + options) == 0:
				logbin_file = self._get_logbin_last()
				print(logbin_file)
				myzipfile = zipfile.ZipFile(zip_file_path, 'w')
				myzipfile.write(logbin_file)
				myzipfile.close()
				os.system('echo "DataBase Backup Success!" >> ' + log_file)
				return True, zip_file_path
			else:
				# Closing quote restored: the original echo string left the
				# shell with an unterminated quote.
				os.system('echo "DataBase Backup Failed!" >> ' + log_file)
				return False, None
		elif utils.is_linux():
			dump_dir = self.bkdir + '/' + timestamp
			tar_file = dump_dir + '/' + timestamp + '_incr.tar.gz'
			log_file = dump_dir + '/' + timestamp + '_incr.log'
			options = '-h' + self.conf['serip'] + ' -u' + self.conf['seruser'] \
					+ ' -p' + self.conf['serpass'] + ' flush-logs'

			os.mkdir(dump_dir)
			if os.system('mysqladmin ' + options) == 0:
				logbin_file = self._get_logbin_last()
				# Fresh name: assigning to ``tarfile`` made the module an
				# unbound local in the original and crashed here.
				with tarfile.open(tar_file, 'w:gz') as tarball:
					tarball.add(logbin_file)
				os.system('echo "DataBase Backup Success!" >> ' + log_file)
				return True, tar_file
			else:
				os.system('echo "DataBase Backup Failed!" >> ' + log_file)
				return False, None
		else:
			print('Unkown System Type!')
			sys.exit(1)
Exemple #16
0
	def glob_bak(self):
		"""Full (global) MySQL backup via ``mysqldump``.

		Dumps all databases, then compresses the dump: a zip file on
		Windows, a gzipped tar on Linux.

		Returns:
			``(True, archive_path)`` on success, ``(False, None)`` on failure.
		"""
		import tarfile

		timestamp = str(time.strftime('%Y-%m-%d-%H_%M_%S', time.localtime(time.time())))

		if utils.is_windows():
			dump_dir = self.bkdir + '\\' + timestamp
			dump_file = dump_dir + '\\' + timestamp + '_glob.sql'
			zip_file_path = dump_dir + '\\' + timestamp + '_glob.zip'
			log_file = dump_dir + '\\' + timestamp + '_glob.log'
			# NOTE(review): the password on the command line is visible in the
			# process list; prefer an option file.
			options = '-h' + self.conf['serip'] + ' -u' + self.conf['seruser'] \
					+ ' -p' + self.conf['serpass'] + ' --all-databases --flush-logs --master-data=2'

			os.mkdir(dump_dir)
			os.system('purge master logs')
			if os.system('mysqldump ' + options + ' > ' + dump_file) == 0:
				myzipfile = zipfile.ZipFile(zip_file_path, 'w')
				myzipfile.write(dump_file)
				myzipfile.close()
				os.system('echo "DataBase Backup Success!" >> ' + log_file)
				return True, zip_file_path
			else:
				# Closing quote restored: the original echo string left the
				# shell with an unterminated quote.
				os.system('echo "DataBase Backup Failed!" >> ' + log_file)
				return False, None
		elif utils.is_linux():
			dump_dir = self.bkdir + '/' + timestamp
			dump_file = dump_dir + '/' + timestamp + '_glob.sql'
			tar_file = dump_dir + '/' + timestamp + '_glob.tar.gz'
			log_file = dump_dir + '/' + timestamp + '_glob.log'
			options = '-h' + self.conf['serip'] + ' -u' + self.conf['seruser'] \
					+ ' -p' + self.conf['serpass'] + ' --all-databases --flush-logs --master-data=2'

			os.mkdir(dump_dir)
			os.system('purge master logs')
			if os.system('mysqldump ' + options + ' > ' + dump_file) == 0:
				# Fresh name: assigning to ``tarfile`` made the module an
				# unbound local in the original and crashed here.
				with tarfile.open(tar_file, 'w:gz') as tarball:
					tarball.add(dump_file)
				os.system('echo "DataBase Backup Success!" >> ' + log_file)
				return True, tar_file
			else:
				os.system('echo "DataBase Backup Failed!" >> ' + log_file)
				return False, None
		else:
			print('Unkown System Type!')
			sys.exit(1)
Exemple #17
0
    def save(self, tarfile):
        """Serialize this frame into ``tarfile``: camera JPEGs, metadata proto, and point cloud."""
        print(self.camera_files)
        # Save each camera image, re-encoded as a 70%-quality JPEG.
        for cam_name, cam_file in self.camera_files.items():
            temp = NamedTemporaryFile(delete=False)
            try:
                width, height, jpeg_bytes = convert_image(cam_file, 70)
                temp.write(jpeg_bytes)
                temp.close()
                tarfile.add(temp.name, arcname="{}/{}.jpg".format(self.index, cam_name))
            finally:
                os.unlink(temp.name)

        meta = lisa_pb2.Frame()
        self.egopose.apply(meta)
        print(meta)
        print(meta.SerializeToString())

        temp = NamedTemporaryFile(delete=False)
        try:
            # Metadata proto plus the point cloud, both under <index>/.
            temp.write(meta.SerializeToString())
            temp.close()
            tarfile.add(temp.name, arcname="%d/metadata.proto" % (self.index))
            tarfile.add(self.pcd_file, arcname="%d/points.pcd" % (self.index))
        finally:
            os.unlink(temp.name)
# Remove duplicates.
files_for_tarball = set(files_for_tarball)

# Bind the open archive to a fresh name: ``as tarfile`` would shadow the
# tarfile module for the rest of the script.
with tarfile.open(os.path.join(html_output_dir, example_name + '.tar.gz'),
        'w:gz', dereference=True) as tar_archive:
    example_dir_idx = example_dir.index(example_name)

    def strip_path(tarinfo):
        """Strip the leading path and rename CMakeLists.txt.tarball to CMakeLists.txt."""
        tarinfo.name = tarinfo.name[example_dir_idx - 1:]
        # Put the inputs and outputs into the build directory because the test
        # will not be able to find them otherwise.
        basename = os.path.basename(tarinfo.name)
        if basename == 'CMakeLists.txt.tarball':
            head, tail = os.path.split(tarinfo.name)
            tarinfo.name = os.path.join(head, 'CMakeLists.txt')
        return tarinfo

    for path in files_for_tarball:
        tar_archive.add(path, filter=strip_path)

# Same renaming applies here: ``as zipfile`` would shadow the zipfile module.
with zipfile.ZipFile(os.path.join(html_output_dir, example_name + '.zip'),
        'w') as zip_archive:
    example_dir_idx = example_dir.index(example_name)
    for path in files_for_tarball:
        arcname = path[example_dir_idx - 1:]
        # Put the inputs and outputs into the build directory because the test
        # will not be able to find them otherwise.
        basename = os.path.basename(arcname)
        if basename == 'CMakeLists.txt.tarball':
            head, tail = os.path.split(arcname)
            arcname = os.path.join(head, 'CMakeLists.txt')
        zip_archive.write(path, arcname)
Exemple #19
0
import tarfile as tar
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart

# Build a multipart mail message (original comments translated from Chinese).
content = MIMEMultipart()  #create the MIMEMultipart object
content["subject"] = "Mail title"  #mail subject
content["from"] = "*****@*****.**"  #sender
content["to"] = "*****@*****.**"  #recipient

# Compress the log files.
# NOTE(review): rebinding ``tar`` below replaces the aliased tarfile module
# with the open archive, so ``tar.open`` cannot be called again afterwards.
tar = tar.open("file name", "w:gz")
for root, dir, files in os.walk("/var/log"):
    for file in files:
        if "secure" in file:
            fullpath = os.path.join(root, file)
            tar.add(fullpath)
tar.close()

# Plain-text body.
content.attach(MIMEText("hdfs-overcomer.ddns.net secure log files."))  #mail body

# Attach the compressed file.
att = MIMEText(open('compress file name', 'rb').read(), 'base64', 'utf-8')
att["Content-Type"] = 'application/octet-stream'
att["Content-Disposition"] = 'attachment; filename="compress file name"'

content.attach(att)  #attach to the message

import smtplib

# NOTE(review): snippet is truncated here -- the ``with`` block body is missing.
with smtplib.SMTP(host="smtp.gmail.com", port="587") as smtp:  #configure the SMTP server
Exemple #20
0
 def pack_into(self, tarfile):
     """Add the configured source path to ``tarfile`` under the configured target name."""
     source = self.__source
     target = self.__target
     tarfile.add(name=source, arcname=target)
Exemple #21
0
    def _tar_job_source_bundle_archive(self, tarfile):
        import os

        tarfile.add(self._job_source_bundle.job_archive(),
                    arcname=os.path.join(self._job_name,
                                         self._job_source_name))
Exemple #22
0
                # NOTE(review): this snippet is truncated -- it starts inside a
                # directory-walking loop whose header is not visible here.
                songname = basename
                if trackno.match(songname):
                    songname = trackno.sub('', songname)
                albumdir = os.path.basename(dirpath)
                artistdir = os.path.basename(os.path.dirname(dirpath))
                key = (artistdir, albumdir, songname, dirpath)
                value = os.path.join(dirpath, filename)
                found[key].append((index, value))
                count += 1
    # Distinct artists and (artist, album) pairs collected during the walk.
    artists = sorted(set([x[0] for x in found]))
    albums = sorted(set([x[:2] for x in found]))

    # re-do everything by artist/album
    by_artist_album = collections.defaultdict(dict)
    for artist, album in albums:
        for key in found:
            if key[0] == artist and key[1] == album:
                by_artist_album[(artist, album)][key[2]] = found[key]

    for (artist, album), songs in by_artist_album.items():
        # figure the albumdir
        # NOTE(review): ``songs.keys()[0]`` is Python-2 only (dict views are
        # not indexable on Python 3).
        albumdir = os.path.dirname(songs[songs.keys()[0]][0][1])
        # Archive name: <artist>@<album>.tar.bz2 with unsafe chars replaced.
        archive_path = os.path.join(albumdir, '@'.join([re.sub('[^-A-Za-z0-9_]', '_', artist), re.sub('[^-A-Za-z0-9_]', '_', album)]) + ".tar.bz2")
        # NOTE(review): assigning to ``tarfile`` shadows the tarfile module
        # in this scope -- inside a function this raises UnboundLocalError.
        tarfile = tarfile.open(archive_path, mode='w:bz2')
        for songname, locations in songs.items():
            for idx, location in locations:
                tarfile.add(location, arcname=songname+".mp3")
                rmfile.write("%s\n" % (location,))
        tarfile.close()
        print archive_path
Exemple #23
0
    def _tar_job_binary(self, tarfile):
        import os

        tarfile.add(self._job_binary(),
                    arcname=os.path.join(self._job_name, self._job_binary()))
Exemple #24
0
inputs = glob.glob(os.path.join(example_dir, '*.md5'))
for path in inputs:
    # Strip the '.md5' suffix to get the data file the checksum refers to.
    files_for_tarball.append(path[:-4])

# Bind the open archive to a fresh name: ``as tarfile`` would shadow the
# tarfile module for the rest of the script.
with tarfile.open(os.path.join(html_output_dir, example_name + '.tar.gz'),
        'w:gz', dereference=True) as tar_archive:
    example_dir_idx = example_dir.index(example_name)

    def strip_path(tarinfo):
        """Strip the leading path and rename CMakeLists.txt.tarball to CMakeLists.txt."""
        tarinfo.name = tarinfo.name[example_dir_idx - 1:]
        # Put the inputs and outputs into the build directory because the test
        # will not be able to find them otherwise.
        basename = os.path.basename(tarinfo.name)
        if basename == 'CMakeLists.txt.tarball':
            head, tail = os.path.split(tarinfo.name)
            tarinfo.name = os.path.join(head, 'CMakeLists.txt')
        return tarinfo

    for path in files_for_tarball:
        tar_archive.add(path, filter=strip_path)

# Same renaming applies here: ``as zipfile`` would shadow the zipfile module.
with zipfile.ZipFile(os.path.join(html_output_dir, example_name + '.zip'),
        'w') as zip_archive:
    example_dir_idx = example_dir.index(example_name)
    for path in files_for_tarball:
        arcname = path[example_dir_idx - 1:]
        # Put the inputs and outputs into the build directory because the test
        # will not be able to find them otherwise.
        basename = os.path.basename(arcname)
        if basename == 'CMakeLists.txt.tarball':
            head, tail = os.path.split(arcname)
            arcname = os.path.join(head, 'CMakeLists.txt')
        zip_archive.write(path, arcname)
        directories = ['lib', 'biglib', 'module']
        if add_python:
            directories.extend(['python', 'cfipython'])
        if add_external:
            directories.append('external')
        if add_examples:
            directories.append('src/UHH2/examples/config')

        for directory in directories:
            full_path = os.path.join(CMSSW_BASE, directory)
            logger.debug("Checking directory %s" % full_path)
            if os.path.exists(full_path):
                logger.debug("Adding directory %s to tarball" % full_path)
                check_directory(full_path)
                tarfile.add(full_path,
                            os.path.join(CMSSW_VERSION, directory),
                            recursive=True)

        # Note that data_dirs are only looked-for and added under the src/ folder.
        # /data/ subdirs contain data files needed by the code
        # /interface/ subdirs contain C++ header files needed e.g. by ROOT6
        # /include/ is necessary for C++ headers for UHH2 objects
        data_dirs = ['data', 'interface', 'include']

        # Search for and tar up "data" directories in src/
        src_path = os.path.join(CMSSW_BASE, 'src')
        for root, _, _ in os.walk(src_path):
            if os.path.basename(root) in data_dirs:
                directory = root.replace(CMSSW_BASE, CMSSW_VERSION)
                logger.debug("Adding data directory %s to tarball" % root)
                check_directory(root)