Example #1
def fix_sources_eol(dist_dir):
    """Set file EOL for tarball distribution.
    """
    print('Preparing exported source file EOL for distribution...')
    prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist'
    win_sources = antglob.glob(dist_dir, 
        includes = '**/*.sln **/*.vcproj',
        prune_dirs = prune_dirs)
    unix_sources = antglob.glob(dist_dir,
        includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in
        sconscript *.json *.expected AUTHORS LICENSE''',
        excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*',
        prune_dirs = prune_dirs)
    for path in win_sources:
        fixeol.fix_source_eol(path, is_dry_run = False, verbose = True, eol = '\r\n')
    for path in unix_sources:
        fixeol.fix_source_eol(path, is_dry_run = False, verbose = True, eol = '\n')
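A minimal driver sketch for the function above, assuming the devtools package (antglob, fixeol) is importable from the same script and the exported sources sit under a local 'dist' directory; both are assumptions for illustration, not part of the original example.

# Hypothetical driver; 'dist' is a placeholder for the exported source tree.
from devtools import antglob, fixeol

if __name__ == '__main__':
    fix_sources_eol('dist')  # CRLF for VS solution/project files, LF for the rest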
Example #2
def sourceforge_web_synchro(sourceforge_project,
                            doc_dir,
                            user=None,
                            sftp='sftp'):
    """Notes: does not synchronize sub-directory of doc-dir.
    """
    userhost = '%s,%[email protected]' % (user, sourceforge_project)
    stdout = run_sftp_batch(userhost, sftp, """
cd htdocs
dir
exit
""")
    existing_paths = set()
    # Small state machine over the sftp transcript:
    #   0 = waiting for the echoed 'dir' command,
    #   1 = skip the line immediately following it,
    #   2 = collect listing entries until the echoed 'exit'.
    collect = 0
    for line in stdout.split('\n'):
        line = line.strip()
        if not collect and line.endswith('> dir'):
            collect = 1
        elif collect and line.endswith('> exit'):
            break
        elif collect == 1:
            collect = 2
        elif collect == 2:
            # The last whitespace-separated field of a listing line is the name.
            path = line.split()[-1:]
            if path and path[0] not in ('.', '..'):
                existing_paths.add(path[0])
    upload_paths = set([os.path.basename(p) for p in antglob.glob(doc_dir)])
    paths_to_remove = existing_paths - upload_paths
    if paths_to_remove:
        print 'Removing the following files from the web:'
        print '\n'.join(paths_to_remove)
        stdout = run_sftp_batch(
            userhost, sftp, """cd htdocs
rm %s
exit""" % ' '.join(paths_to_remove))
    print 'Uploading %d files:' % len(upload_paths)
    batch_size = 10
    upload_paths = list(upload_paths)
    start_time = time.time()
    for index in xrange(0, len(upload_paths), batch_size):
        paths = upload_paths[index:index + batch_size]
        # Elapsed time per uploaded file so far, used to estimate remaining time.
        sec_per_file = (time.time() - start_time) / (index + 1)
        remaining_files = len(upload_paths) - index
        remaining_sec = sec_per_file * remaining_files
        print '%d/%d, ETA=%.1fs' % (index + 1, len(upload_paths),
                                    remaining_sec)
        run_sftp_batch(userhost,
                       sftp,
                       """cd htdocs
lcd %s
mput %s
exit""" % (doc_dir, ' '.join(paths)),
                       retry=3)
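For context, here is a rough sketch of what the run_sftp_batch helper used above could look like; it is an assumption for illustration, not the project's actual implementation. The idea is to feed the batch commands to the sftp client on stdin and return the transcript, retrying on failure.

# Assumed helper, sketched for illustration only: runs an sftp batch and
# returns the transcript. Relies on non-interactive (key-based) authentication.
import subprocess

def run_sftp_batch(userhost, sftp, batch, retry=0):
    for attempt in range(retry + 1):
        process = subprocess.Popen([sftp, '-b', '-', userhost],
                                   stdin=subprocess.PIPE,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.STDOUT)
        stdout, _ = process.communicate(batch.encode('utf-8'))
        if process.returncode == 0 or attempt == retry:
            return stdout.decode('utf-8', 'replace')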
Example #3
def sourceforge_web_synchro(sourceforge_project, doc_dir,
                             user=None, sftp='sftp'):
    """Notes: does not synchronize sub-directory of doc-dir.
    """
    userhost = '%s,%[email protected]' % (user, sourceforge_project)
    stdout = run_sftp_batch(userhost, sftp, """
cd htdocs
dir
exit
""")
    existing_paths = set()
    collect = 0
    for line in stdout.split('\n'):
        line = line.strip()
        if not collect and line.endswith('> dir'):
            collect = True
        elif collect and line.endswith('> exit'):
            break
        elif collect == 1:
            collect = 2
        elif collect == 2:
            path = line.strip().split()[-1:]
            if path and path[0] not in ('.', '..'):
                existing_paths.add(path[0])
    upload_paths = set([os.path.basename(p) for p in antglob.glob(doc_dir)])
    paths_to_remove = existing_paths - upload_paths
    if paths_to_remove:
        print('Removing the following files from the web:')
        print('\n'.join(paths_to_remove))
        stdout = run_sftp_batch(userhost, sftp, """cd htdocs
rm %s
exit""" % ' '.join(paths_to_remove))
    print('Uploading %d files:' % len(upload_paths))
    batch_size = 10
    upload_paths = list(upload_paths)
    start_time = time.time()
    for index in range(0, len(upload_paths), batch_size):
        paths = upload_paths[index:index + batch_size]
        # Elapsed time per uploaded file so far, used to estimate remaining time.
        sec_per_file = (time.time() - start_time) / (index + 1)
        remaining_files = len(upload_paths) - index
        remaining_sec = sec_per_file * remaining_files
        print('%d/%d, ETA=%.1fs' % (index + 1, len(upload_paths), remaining_sec))
        run_sftp_batch(userhost, sftp, """cd htdocs
lcd %s
mput %s
exit""" % (doc_dir, ' '.join(paths)), retry=3)
Example #4
def update_license_in_source_directories(source_dirs, dry_run, show_diff):
    """Updates license text in C++ source files found in directory source_dirs.
    Parameters:
      source_dirs: list of directory to scan for C++ sources. Directories are
                   scanned recursively.
      dry_run: if True, just print the path of the file that would be updated,
               but don't change it.
      show_diff: if True, print the path of the file that would be modified,
                 as well as the change made to the file. 
    """
    from devtools import antglob
    prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist'
    for source_dir in source_dirs:
        cpp_sources = antglob.glob(source_dir,
            includes = '''**/*.h **/*.cpp **/*.inl''',
            prune_dirs = prune_dirs)
        for source in cpp_sources:
            update_license(source, dry_run, show_diff)
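A minimal call sketch for the license updater above, assuming an update_license(path, dry_run, show_diff) helper exists alongside it; the directory names and flag values are placeholders for illustration.

# Hypothetical invocation with placeholder directories and flags.
if __name__ == '__main__':
    update_license_in_source_directories(['src', 'include'],
                                          dry_run=True, show_diff=False)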