Code example #1
def create_md5_table():
    out.log('calculating local md5 hashes', 'sync')
    files = []
    md5_table = {}
    for root, dirnames, filenames in os.walk(engine.LOCAL_WWW_DIR):
        #for filename in fnmatch.filter(filenames, '*'):
        for filename in filenames:
            abs_file = os.path.join(root, filename)
            rel_file = abs_file[len(engine.LOCAL_WWW_DIR)+1:]
            if isinstance(rel_file, bytes):
                rel_file = rel_file.decode('utf-8')
            if NORMALIZE_UNICODE_FILENAMES:
                rel_file = unicodedata.normalize(NORMALIZATION_FORM, rel_file)
            files.append(rel_file)

    for f in files:
        try:
            md5_table[f] = engine.md5sum(f)
        except IndexError:
            pass
        except IOError:
            if not (ignored_file(f) or system_file(f)):
                out.log('could not open ' + f, 'sync', out.LEVEL_WARNING)

    return md5_table
Code example #2
def load_md5_table(filename):
    out.log('loading hash table from ' + filename, 'sync')
    engine.sync_ftp()
    try:
        #load from file
        md5_table_file = open(filename, 'r', encoding='utf-8')
        md5_table = json.load(md5_table_file)
        md5_table_file.close()
    except:
        #if that fails: return empty table
        out.log('unable to load hashtable from ' + filename + ', creating empty table.', 'sync', out.LEVEL_WARNING)
        md5_table = {}

    #normalize unicode entries
    if NORMALIZE_UNICODE_FILENAMES:
        normalized_md5_table = {}
        for key in md5_table:
            if isinstance(key, bytes):
                key = key.decode('utf-8')
            normalized_key = unicodedata.normalize(NORMALIZATION_FORM, key)
            normalized_md5_table[normalized_key] = md5_table[key]
        md5_table = normalized_md5_table

    #give back
    return md5_table
Code example #3
def copy_wp_config(when_exists='skip'):
    out.log('copy wp-config.php to wordpress directory', 'wordpress')

    #check if wp-config.php already exists
    if overwrite_allowed(engine.LOCAL_WP_DIR + '/wp-config.php', when_exists):
        #copy our config.php to the desired location
        run.local('cp php/wordpress/wp-config.php ' + engine.LOCAL_WP_DIR)
Code example #4
def get_compressed(remote_file,
                   local_file=None,
                   verbose=False,
                   permissions=None,
                   fast_compression=False,
                   preserve_remote_file=False):
    import php
    #compress remote file
    compressed_remote = gzip.compress_remote(remote_file,
                                             fast=fast_compression)
    php.flush_buffer()  #in case we were buffering php execution, it is time to execute now
    #download
    compressed_local = get(compressed_remote,
                           gzip.compressed_filename(local_file), verbose,
                           permissions)
    try:
        filesize = os.path.getsize(compressed_local)
        out.log('downloaded compressed ' + str(filesize / 1000.0) + ' kb file',
                'transfer')
    except OSError:
        pass

    if preserve_remote_file:
        #restore uncompressed remote file
        gzip.uncompress_remote(compressed_remote)
    else:
        #or tell the engine it has been renamed
        engine.rename_remote_file(remote_file, compressed_remote)

    #uncompress local file
    return gzip.uncompress_local(compressed_local, True)
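A brief usage sketch for get_compressed; the remote path, local path, and log message below are placeholders, only the signature comes from the code above:

#hypothetical call: download a remote SQL dump via its compressed form,
#keeping the uncompressed original on the server
local_dump = get_compressed('dump/database.sql',
                            local_file='backup/database.sql',
                            fast_compression=True,
                            preserve_remote_file=True)
out.log('database dump available at ' + local_dump, 'transfer')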
Code example #5
def pack_remote_list(file_list):
    #tell
    out.log('packing a bunch of remote files', 'tar')

    #validate files
    if not validate_files_list(file_list):
        out.log('Error: files need to be given relative to www dir.', 'tar',
                out.LEVEL_ERROR)
        engine.quit()

    #register tar file
    tar_file = engine.get_new_remote_file('tar')

    #register list file
    list_file_content = "\n".join(file_list)
    list_file = engine.write_remote_file(list_file_content, 'files')

    #assemble packing command. Always pack from www dir
    pack_command = 'tar cf ' + tar_file + ' -C ' + engine.NORM_WWW_DIR + ' --files-from ' + list_file

    #pack!
    run.remote(pack_command)

    #and return packed file, of course
    return tar_file
Code example #6
File: core.py Project: benguillet/dcor_frontend
def processModule(targetModule, doc_destination, pyPath=None, excluded_modules=[]):
	""" Processes a module given by a target directory path and generates documentation 
	    @input targetModule - the full path to the module to be documented.
	    @input pyPath - if the python path must be updated to help import this module, make those changes.  Defaults to the directory above the requested module.
	    @input excluded_modules - a list of strings representing modules and packages that should not be documented.
	    @return - a list of strings representing the names of the html files created in the documentation process.
	"""

	# process default values for vars that cannot be set to "None"
	if pyPath == None:
		pyPath = targetModule + "/.."


	# Map out all the modules to document
	names = doc.indexPythonModule(targetModule)	
	out.log("Found " + str(len(names)) + " modules to document in module " + str(os.path.basename(targetModule)) )

	# change to the destination directory so pydoc can do its thing
	os.chdir(doc_destination)
	out.debugPrint("Changed working directory to " + os.getcwd())
		
	
	# Generate their documentation
	sys.path.append(os.path.abspath(pyPath))
	filenames = doc.documentModules(names, exclude=excluded_modules, destination=doc_destination )
			
	out.log("Wrote documentation for " + str(len(filenames)) + " modules into directory:\n\t" + str(doc_destination))
	
	return filenames
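A minimal usage sketch for processModule; the paths and excluded module names are placeholders, only the signature comes from the code above:

# hypothetical invocation with placeholder paths
html_files = processModule("/home/user/project/mymodule",
                           "/home/user/docs",
                           excluded_modules=["tests"])
out.log("Generated " + str(len(html_files)) + " documentation pages.")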
Code example #7
def cleanup(namespace = None, local = True, remote = True):
    #cleanup all namespaces
    if namespace is None:

        #make a copy of the list before iterating over it
        up_for_delete_list = list(local_tmp_files)
        #cleanup every entry of this list
        for name in up_for_delete_list:
            cleanup(name, local, remote)

        #remove guard and exit
        cleaning_up_already = False
        return

    import transfer
    out.log('Removing tmp files in namespace ' + namespace, 'cleanup', out.LEVEL_DEBUG)
    #remove remote files first, because their removal might create local tmp files
    if remote:
        for file in remote_tmp_files[namespace]:
            transfer.remove_remote(file)
        remote_tmp_files[namespace] = []

    #then remove local files
    if local:
        for file in local_tmp_files[namespace]:
            transfer.remove_local(file)
        local_tmp_files[namespace] = []
Code example #8
def quit():
    if not finalizing_in_process:
        finalize()
        out.log('Exited with errors. Look at output/output.log for a detailed log.', 'engine', out.LEVEL_ERROR)
        exit()
    else:
        out.log('An error occurred during finalizing. We ignore it and try to finish finalization.', 'engine', out.LEVEL_ERROR)
Code example #9
def put_multiple(file_list):
    #split into ascii and non-ascii
    ascii_files, non_ascii_files = engine.split_by_encoding(file_list)

    if engine.FORCE_FTP_FILE_TRANSFER:
        ascii_files = []
        non_ascii_files = file_list

    #pack ascii files, upload compressed and unpack on server
    if len(ascii_files) > 0:
        out.log('uploading files with ascii compatible names', 'transfer')
        local_tar = tar.pack_local_list(ascii_files)
        remote_tar = put_compressed(local_tar)
        tar.unpack_remote(remote_tar)

    #take the non-ascii files and upload them one after another using ftp
    if len(non_ascii_files) > 0:
        out.log('uploading files with non-ascii filename', 'transfer')
        directories = engine.get_all_directories(non_ascii_files)
        for directory in directories:
            create_remote_directory(directory)
        command = ''
        for f in non_ascii_files:
            command += u'put ' + engine.LOCAL_WWW_DIR + '/' + f + u' ' + ftp_path(
                f) + u'\n'
        ftp.execute(command)
Code example #10
def import_local_db(filename, compression=None):
    import os

    #is our file compressed?
    if compression is None:
        compression = gzip.is_compressed(filename)
    if compression:
        #uncompress
        sql_file = gzip.uncompress_local(filename)
    else:
        sql_file = filename

    #wipe old db
    if os.path.isfile(sql_file):
        truncate_local_db()
    else:
        out.log('File not found: ' + filename, 'mysql', out.LEVEL_ERROR)
        quit()

    #refill db
    out.log('importing local database from file ' + sql_file, 'mysql')
    execute_local_file(sql_file)

    #compress again, although it would have been better to not touch the compressed file in the first place
    if compression:
        gzip.compress_local(sql_file)
Code example #11
def remove_command_file():
    out.log('removing command file', 'run')
    try:
        engine.NORM_COMMAND_FILE
    except:
        out.log("Error: Could not find NORM_COMMAND_FILE. No file to remove", 'run', out.LEVEL_WARNING)
        return
    transfer.remove_remote(engine.NORM_COMMAND_FILE)
Code example #12
def create_remote_directory(directory, permissions=None):
    out.log('create remote directory: ' + directory, 'transfer',
            out.LEVEL_VERBOSE)
    command = 'mkdir ' + ftp_path(directory)
    if engine.FTP_CLIENT == 'sftp':
        command = '-' + command
    if permissions is not None:
        command += "\nchmod " + str(permissions) + " " + ftp_path(directory)
    ftp.execute(command)
Code example #13
def initialize(command):
    import out
    import ftp
    out.clear_logfile()

    global enable_ftp_buffer
    if command in enable_ftp_buffer:
        out.log('enabling complete ftp buffering for ' + command + ' command', 'engine', out.LEVEL_DEBUG)
        ftp.start_buffer()
Code example #14
def remote_python_script(script_name, arguments = ''):
    #tell
    out.log('running python script ' + script_name, 'remote', out.LEVEL_VERBOSE)

    #upload script
    remote_script = transfer.put(script_name)

    #run
    remote('HOME=. python ' + remote_script + ' ' + arguments)
Code example #15
def decorated_func(*args, **kwargs):
    global start_time
    start_time = time.time()
    initialize(command)
    result = func(*args, **kwargs)
    finalize()
    elapsed_time = "{:.3f}".format(time.time() - start_time)
    out.log('Done. Took ' + elapsed_time + ' seconds.')
    return result
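Example #15 is only the inner closure of what looks like a timing decorator; below is a rough sketch of the enclosing decorator factory it could belong to. The name command_task and the usage are assumptions, while initialize, finalize and out appear in the snippets above.

def command_task(command):
    #hypothetical decorator factory; the decorated_func closure from example #15 would sit here
    def wrapper(func):
        def decorated_func(*args, **kwargs):
            ...  #body as shown in example #15
        return decorated_func
    return wrapper

#hypothetical usage
@command_task('deploy')
def deploy():
    pass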
Code example #16
def assemble_htaccess_data(domain):
    ht_data = engine.read_local_file(engine.SCRIPT_DIR +
                                     '/htaccess/signature.htaccess')
    ht_data += engine.read_local_file(engine.SCRIPT_DIR +
                                      '/htaccess/common.htaccess')
    if engine.COMPRESSION == 'DEFLATE':
        ht_data += engine.read_local_file(engine.SCRIPT_DIR +
                                          '/htaccess/deflate.htaccess')
    elif engine.COMPRESSION == 'GZIP':
        ht_data += engine.read_local_file(engine.SCRIPT_DIR +
                                          '/htaccess/gzip.htaccess')
    elif engine.COMPRESSION == 'PRECOMPRESSION':
        ht_data += engine.read_local_file(engine.SCRIPT_DIR +
                                          '/htaccess/precompression.htaccess')
    elif engine.COMPRESSION == None or engine.COMPRESSION == False or engine.COMPRESSION == 'NONE':
        pass
    else:
        out.log(
            'Error: Invalid value for COMPRESSION: ' +
            str(engine.COMPRESSION) +
            '. Use DEFLATE, GZIP, PRECOMPRESSION or NONE instead.', 'htaccess',
            out.LEVEL_ERROR)
    #caching only for live site
    if domain == 'live':
        if engine.CACHING == 'DEVELOPMENT':
            ht_data += engine.read_local_file(
                engine.SCRIPT_DIR + '/htaccess/caching-development.htaccess')
        elif engine.CACHING == 'PRODUCTION':
            ht_data += engine.read_local_file(
                engine.SCRIPT_DIR + '/htaccess/caching-production.htaccess')
        elif engine.CACHING == None or engine.CACHING == False or engine.CACHING == 'NONE':
            ht_data += engine.read_local_file(
                engine.SCRIPT_DIR + '/htaccess/caching-none.htaccess')
        else:
            out.log(
                'Warning: You have not specified a valid browser caching strategy: '
                + str(engine.CACHING) +
                '. Use PRODUCTION, DEVELOPMENT or NONE instead.', 'htaccess',
                out.LEVEL_WARNING)
    else:
        #explicitly disable browser caching for all assets on the local development site
        ht_data += engine.read_local_file(engine.SCRIPT_DIR +
                                          '/htaccess/caching-none.htaccess')
    if os.path.isfile(engine.LOCAL_WWW_DIR + '/.htaccess.custom'):
        ht_data += engine.read_local_file(engine.LOCAL_WWW_DIR +
                                          '/.htaccess.custom')
    if engine.IS_WORDPRESS:
        ht_data += engine.read_local_file(engine.SCRIPT_DIR +
                                          '/htaccess/wordpress.htaccess')

    if domain == 'live':
        if engine.NEED_BASIC_AUTH:
            ht_data += engine.read_local_file(engine.SCRIPT_DIR +
                                              '/htaccess/basic-auth.htaccess')

    return ht_data
Code example #17
def save_to_php(filename, data):
    content = "<?php return "
    if type(data) == str:
        content += "'" + data + "';"
    else:
        out.log('save_to_php does not support type ' + str(type(data)) + '.',
                'compile', out.LEVEL_ERROR)
        engine.quit()

    engine.write_local_file(content, filename=filename)
Code example #18
def write_local_file(content, suffix = None, permissions = None, filename = None):
    if permissions is not None:
        out.log('Error: Setting permissions in write_local_file is not implemented yet.', 'engine', out.LEVEL_ERROR)
    if filename is None:
        filename = get_new_local_file(suffix)
    if isinstance(content, bytes):
        content = content.decode('utf-8')
    file = open(filename, 'w', encoding='utf-8')
    file.write(content)
    file.close()
    return filename
Code example #19
def rename_file(from_file, to_file, file_list):
    out.log('Registered file for renaming ' + from_file + ' -> ' + to_file, 'engine', out.LEVEL_DEBUG)
    global current_tmp_file_namespace
    #define filter function
    def filter(filename):
        if filename == from_file:
            return to_file
        else:
            return filename
    #filter the list with that function
    file_list[current_tmp_file_namespace] = [filter(filename) for filename in file_list[current_tmp_file_namespace]]
Code example #20
def remove_remote(filename):
    out.log('remove remote file: ' + filename, 'transfer')
    if engine.FTP_CLIENT == 'ftp':
        command = 'delete ' + ftp_path(filename)
    elif engine.FTP_CLIENT == 'sftp':
        command = 'rm ' + ftp_path(filename)
    else:
        out.log('Unknown ftp client ' + engine.FTP_CLIENT, 'transfer',
                out.LEVEL_ERROR)
        engine.quit()
    ftp.execute(command)
Code example #21
def remove_remote_directory_contents(directory):
    out.log('remove content of remote directory: ' + directory, 'transfer',
            out.LEVEL_VERBOSE)
    if engine.FTP_CLIENT == 'ftp':
        command = 'delete ' + ftp_path(directory) + '/*'
    elif engine.FTP_CLIENT == 'sftp':
        command = 'rm ' + ftp_path(directory) + '/*'
    else:
        out.log('Unknown ftp client ' + engine.FTP_CLIENT, 'transfer',
                out.LEVEL_ERROR)
        engine.quit()
    ftp.execute(command)
Code example #22
def prepare(overwrite, upload=False):
    htaccess_file = os.path.abspath(engine.LOCAL_WWW_DIR + '/.htaccess')
    if os.path.isfile(htaccess_file):
        if overwrite == 'fail':
            out.log(
                '.htaccess already exists. Specify overwrite as parameter or delete the file manually before trying again.',
                'htaccess', out.LEVEL_ERROR)
            engine.quit()
        if overwrite == 'overwrite':
            run.local('rm ' + htaccess_file)
    if upload:
        transfer.remove_remote('.htaccess')
Code example #23
def remote(command, halt_on_output = False):
    #tell what happens
    out.log(command, 'remote', out.LEVEL_VERBOSE)

    #choose the correct command system
    if engine.COMMAND_SYSTEM == 'PHP':
        php.execute(command, halt_on_output)
    elif engine.COMMAND_SYSTEM == 'SSH':
        ssh.execute(command)
    else:
        #even less implemented
        out.log("Error Unknown COMMAND_SYSTEM " + engine.COMMAND_SYSTEM, 'remote', out.LEVEL_ERROR)
        engine.quit()
Code example #24
def remove_remote_multiple(file_list):
    out.log('removing multiple remote files', 'transfer')
    command = ''
    for file in file_list:
        if engine.FTP_CLIENT == 'ftp':
            command += 'delete ' + ftp_path(file) + '\n'
        elif engine.FTP_CLIENT == 'sftp':
            command += 'rm ' + ftp_path(file) + '\n'
        else:
            out.log('Unknown ftp client ' + engine.FTP_CLIENT, 'transfer',
                    out.LEVEL_ERROR)
            engine.quit()
    ftp.execute(command)
Code example #25
def create_remote_md5_table():
    #register new remote file
    remote_md5_table_file = engine.get_new_remote_file('json')
    #run script to create md5 hashes on the remote
    out.log('calculating remote md5 hashes', 'sync')
    run.remote_python_script(engine.SCRIPT_DIR + '/py/scripts/create_md5_table.py', engine.NORM_WWW_DIR + ' ' + remote_md5_table_file)
    #download table file
    out.log('download remote hash table', 'sync', out.LEVEL_VERBOSE)
    md5_table_file = transfer.get(remote_md5_table_file)
    #load it
    md5_table = load_md5_table(md5_table_file)
    #and return it
    return md5_table
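A hedged sketch of how the local and remote tables could be diffed to pick files that need uploading; the helper name files_to_upload is an assumption, while create_md5_table and create_remote_md5_table come from examples #1 and #25:

def files_to_upload():
    #hypothetical diff of the two hash tables: anything missing or changed on the remote
    local_table = create_md5_table()
    remote_table = create_remote_md5_table()
    return [f for f, md5 in local_table.items()
            if remote_table.get(f) != md5]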
Code example #26
def optimize_jpg(quality = 100):
    out.log('optimizing jpgs...', 'image')
    for root, dirnames, filenames in os.walk(engine.LOCAL_WWW_DIR):
        for filename in fnmatch.filter(filenames, '*.jpg'):
            if os.path.getsize(root + '/' + filename) > 20000:
                progressive = '--all-progressive'
            else:
                progressive = ''
            if quality < 100:
                quality_option = '-m' + str(quality)
            else:
                quality_option = ''
            run.local('jpegoptim ' + progressive + ' ' + quality_option + ' "' + root + '/' + filename + '"', ignore_exit_code = True)
Code example #27
def remove_command_file():
    global command_file_ready
    if not command_file_ready:
        return
    out.log('removing command file', 'php', out.LEVEL_VERBOSE)
    try:
        engine.NORM_COMMAND_FILE
    except:
        out.log("Error: Could not find NORM_COMMAND_FILE. No file to remove",
                'php', out.LEVEL_WARNING)
        return
    transfer.remove_remote(engine.NORM_COMMAND_FILE)
    command_file_ready = False
Code example #28
def po():
    out.log('looking for .po files recursively...', 'compile',
            out.LEVEL_VERBOSE)
    compiled_files = 0
    files = []
    for root, dirnames, filenames in os.walk(engine.LOCAL_WWW_DIR):
        for filename in fnmatch.filter(filenames, '*.po'):
            files.append(os.path.join(root, filename))

    for po in files:
        mo = po[:-3] + '.mo'
        # needs to be refreshed if
        # 1. there is no .mo file, or
        # 2. the .mo file is out of date,
        # and 3. the .po file is not located in a folder named 'orig'
        if (not os.path.isfile(mo)
                or os.path.getmtime(po) > os.path.getmtime(mo)) and (
                    not os.path.split(os.path.dirname(po))[1] == 'orig'):
            out.log('compiling ' + po, 'compile')
            run.local('msgfmt -o ' + mo + ' ' + po)
            compiled_files += 1
        else:
            out.log('skipping ' + po, 'compile', out.LEVEL_VERBOSE)

    out.log('all .mo files up to date.', 'compile')
Code example #29
def execute(command):
    out.log(command, 'ftp', out.LEVEL_VERBOSE)
    #write the command into file
    ftp_file = engine.write_local_file(command, 'ftp')

    #run the ftp file
    if engine.FTP_CLIENT == 'ftp':
        ftp_command_line = 'ftp -i ' + engine.FTP_PROTOCOL + '://' + engine.escape(
            engine.FTP_USER) + ':' + engine.escape(
                engine.FTP_PASSWORD) + '@' + engine.FTP_HOST + ' <' + ftp_file
    elif engine.FTP_CLIENT == 'sftp':
        ftp_command_line = 'sftp -b ' + ftp_file + ' ' + engine.escape(
            engine.FTP_USER) + '@' + engine.FTP_HOST
    run.local(ftp_command_line, retry=3)
Code example #30
def get_database_file_description(domain = None):
    if domain == 'local':
        description = 'dump-localhost'
    elif domain == 'remote':
        try:
            description = 'dump-' + LIVE_DOMAIN
        except NameError:
            description = 'dump-remote-unknown'
    elif domain is None:
        description = 'dump'
    else:
        out.log('Unknown domain: ' + str(domain), 'engine', out.LEVEL_ERROR)
        quit()

    return description
Code example #31
def append_to_hosts():
    try:
        engine.LOCAL_DOMAIN
    except:
        out.log(
            'Your LOCAL_DOMAIN variable has not been set. Please specify a valid hostname and try again.'
        )
        return
    out.log(
        'appending ' + engine.LOCAL_DOMAIN + ' to ' + engine.LOCAL_ETC_HOSTS,
        'apache')
    run.local('echo "127.0.0.1  ' + engine.LOCAL_DOMAIN +
              ' # appended by fabric deploy script." >>' +
              engine.LOCAL_ETC_HOSTS,
              sudo=True)
Code example #32
File: core.py Project: benguillet/dcor_frontend
def writeDjangoUrlConfig(filenames, urlConfig_destination, urlConfig_name="apiUrls.py", templatePrefix='api/'):
	""" Creates a Django-style URL Configuration file for all the filenames specified.
	    @input filenames - the file names to write into the urlConfig file
	    @input urlConfig_destination - the destination for the apiUrls file
	    @input urlConfig_name - urlconfig file name (default: 'apiUrls.py')
	    @input templatePrefix - In the render_to_response call, should we look in a subdirectory of the templates dir?  (default: 'api/')
	    @return - none
	""" 
	
	
	os.chdir(urlConfig_destination)
	out.debugPrint("Changed working directory to " + os.getcwd())
    
	f = open(urlConfig_name,'w')    # open the file for non-append writing
	out.log("Opened file for writing:\n\t" + os.path.abspath(f.name))

	f.write(out.generateUrlPatterns(filenames, templatePrefix)) # write the urlpatterns to the file
	
	out.log("Wrote apiUrls.py file.")
	
	# finally, close the file
	f.close()
Code example #33
File: core.py Project: benguillet/dcor_frontend
def setUpDjango(djangoProjectPath, djangoSettingsModule="settings"):
	""" Sets up the django environment for a given project to help with the importing of django-related modules.
	    @input djangoProjectPath - the full directory path to the django project we'll be using.
	    @input djangoSettingsModule - the name of the python module to use as the settings for this project.  (default = "settings")
	""" 
	
	# generate the full module name for the settings module
	settingsModuleName = os.path.basename(djangoProjectPath) + "." + djangoSettingsModule
	try:
		# do all the important settings stuff 
		sys.path.append(os.path.abspath(djangoProjectPath+'/..'))
		os.environ['DJANGO_SETTINGS_MODULE'] = settingsModuleName
		from django.core.management import setup_environ
		settingsModule = importer.importModule(settingsModuleName)
		out.log("Configured PATH, loaded Django settings.")
		
		setup_environ(settingsModule)
		out.log("Set up Django Environment for project " + os.path.basename(djangoProjectPath)+".")
	except Exception as e:
		out.log("Could not set up Django environment - " + str(e), err=True)
		out.log("Is the Django settings module really called " + settingsModuleName + " ?")
		out.log("Proceeding anyway.  Many of the module imports may fail.", err=True)
Code example #34
File: doc.py Project: benguillet/dcor_frontend
def documentModules(moduleNames, exclude=[], destination=".", Path=None):
    """ Generates pydoc documentation for each module name given and outputs the HTML files into the destination directory.
        @input moduleNames - a list of names for the modules to document.
        @input exclude - a list of module and package names that should NOT be documented
        @input destination - a string indicating the directory path where the documentation HTML should be written.  Defaults to "."
        @input Path - any specific PATH to use?
        @return - a list of the filenames of all html files which were written.
    """

    # update the path variable with any special info
    if Path is not None:
        sys.path.append(Path)
    
    writtenFiles = [] # list for all files that have been written
    
    # change to the appropriate directory
    os.chdir(destination)
    
    # loop through all the module names we were given
    for modName in moduleNames:
        
        # filter out any excluded modules
        for x in exclude:
            if modName.find(x) != -1:
                out.log("Skipping module " + modName)
                modName = ""

        # filter out bogus module names
        if modName == "":
            continue
    

        # import the module and write out the documentation for it.
        try:
            M = importModule(modName, Path=Path)

	    out.log("",nl=False)

            pydoc.writedoc(M)

            writtenFiles.append(modName+".html")
        except ImportError as e: # print error msg and proceed to next object
            out.log("Could not import module " + modName + " - " + str(e), err=True)
            continue

    return writtenFiles
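A short usage sketch for documentModules; the module names and destination directory are placeholders, only the signature comes from the code above:

# hypothetical call: document two modules, skipping anything under "tests"
written = documentModules(["mypackage.core", "mypackage.utils"],
                          exclude=["tests"],
                          destination="./docs")
out.log("Wrote " + str(len(written)) + " HTML files.")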