Example #1
    def handle(self, options, global_options, *args):
        from utils import extract_dirs, pkg, extract_file
        from shutil import copy
        
        if os.path.exists('conf.py'):
            has_conf = True
        else:
            has_conf = False
        extract_dirs('parm', 'templates/env', '.', exclude=['conf.py'])
        if not os.path.exists('static'):
            os.makedirs('static')
            
        d = {}
        
        d['project'] = 'Parm'
        d['copyright'] = '2013, Limodou'
        d['version'] = '1.0'
        
        if has_conf:
            create = get_answer("Create config file", quit='q') == 'Y'
        
        if not has_conf or (has_conf and create):
            d['project'] = get_input("Project name [Parm]:", default=d['project'])
            d['copyright'] = get_input("Copyright information [%s]:" % d['copyright'], default=d['copyright'])
            d['version'] = get_input("Version [%s]:" % d['version'], default=d['version'])

            extract_file('parm', 'templates/env/conf.py', '.')
            text = template.template_file('conf.py', d).replace('\r\n', '\n')
            f = open('conf.py', 'wb')
            f.write(text)
            f.close()
Example #2
    def _get_or_download_model(self, download: bool) -> Optional[str]:
        """
        Return the downloaded model path. If the model path does not exist and download is True,
        the model will be downloaded and its path returned.
        Args:
            download: flag that decides whether to download the model in case it does not exist
        Returns:
            str: model path or None
        """
        home_dir = home_directory()
        downloaded_models_dir = os.path.join(home_dir, MODELS_DIR)

        if not os.path.exists(downloaded_models_dir):
            os.makedirs(downloaded_models_dir)

        model_hashed_name = get_hashed_name(self.embedding + self.model)
        model_path = os.path.join(downloaded_models_dir, model_hashed_name)

        if not os.path.exists(model_path):
            if not download:
                return

            model_download_path = model_path + '.' + self.embedding_cls.EMBEDDING_MODELS[
                self.model].format
            model_download_url = self.embedding_cls.EMBEDDING_MODELS[
                self.model].download_url
            print(f"Model does not exists, Downloading model: {self.model}")
            download_from_url(model_download_url, model_download_path)
            extract_file(model_download_path, model_path)
            os.remove(model_download_path)
            print(f"Model downloaded successfully!")
        return model_path
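
The utils.extract_file helper itself is never shown in these snippets; a minimal sketch of a two-argument variant compatible with the (archive_path, destination) call sites above, assuming only the standard zipfile and tarfile modules, could look like this:

import os
import tarfile
import zipfile


def extract_file(archive_path, to_directory='.'):
    """Sketch: extract a .zip or .tar(.gz/.bz2) archive into to_directory."""
    os.makedirs(to_directory, exist_ok=True)
    if zipfile.is_zipfile(archive_path):
        with zipfile.ZipFile(archive_path) as zf:
            zf.extractall(to_directory)
    elif tarfile.is_tarfile(archive_path):
        with tarfile.open(archive_path) as tf:
            tf.extractall(to_directory)
    else:
        raise ValueError('Unsupported archive format: %s' % archive_path)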
Example #3
def main():
    url = 'http://host.robots.ox.ac.uk/pascal/VOC/voc2012/VOCtrainval_11-May-2012.tar'  # NOQA
    dataset_dir = chainer.dataset.get_dataset_directory('pascal')
    path = os.path.join(dataset_dir, 'VOCtrainval_11-May-2012.tar')
    utils.cached_download(
        url,
        path=path,
        md5='6cd6e144f989b92b3379bac3b3de84fd',
    )
    utils.extract_file(path, to_directory=dataset_dir)
Example #4
def search_jdk():
    if jdk_zip_exists():
        locales.adv_print(f"JDK_ZIP_ALREADY_EXISTS",
                          variables={"zipfile": settings["jdk_zip_name"]})
        utils.extract_file(settings["jdk_zip_name"])
        os.remove(settings["jdk_zip_name"])
    for file in os.listdir():
        if "jdk" in file and not os.path.isfile(os.path.join(".", file)):
            utils.set_java_home(file)
            return True
    jdk = os.environ.get("JAVA_HOME")
    # guard against a JAVA_HOME that points to an invalid location (e.g. the recycle bin)
    return settings["skip_jdk_checks"] or (jdk is not None
                                           and utils.verify_path(Path(jdk)))
Example #5
def install(options):
    create_dir(DOWNLOAD_DIR)

    download(
        "https://download.java.net/java/GA/jdk14.0.2/205943a0976c4ed48cb16f1043c5c647/12/GPL/openjdk-%s_windows"
        "-x64_bin.zip" % OPENJDK_VERSION,
        where=DOWNLOAD_DIR)
    download(
        "http://apache.40b.nl/tomcat/tomcat-9/v%s/bin/apache-tomcat-%s-windows-x64.zip"
        % (TOMCAT_VERSION, TOMCAT_VERSION),
        where=DOWNLOAD_DIR)
    download(
        "https://github.com/oracle/opengrok/releases/download/%s/opengrok-%s.tar.gz"
        % (OPENGROK_VERSION, OPENGROK_VERSION),
        where=DOWNLOAD_DIR)
    download(
        "https://github.com/universal-ctags/ctags-win32/releases/download/2020-07-22%2F631690ad/ctags-2020-07"
        "-22_631690ad-x64.zip",
        where=DOWNLOAD_DIR)

    create_dir(INSTALLED_DIR)

    extract_file(os.path.join(
        DOWNLOAD_DIR, "openjdk-%s_windows-x64_bin.zip" % OPENJDK_VERSION),
                 where=INSTALLED_DIR)
    extract_file(os.path.join(
        DOWNLOAD_DIR, "apache-tomcat-%s-windows-x64.zip" % TOMCAT_VERSION),
                 where=INSTALLED_DIR)
    extract_file(os.path.join(DOWNLOAD_DIR,
                              "opengrok-%s.tar.gz" % OPENGROK_VERSION),
                 where=INSTALLED_DIR)
    extract_file(os.path.join(DOWNLOAD_DIR,
                              "ctags-2020-07-22_631690ad-x64.zip"),
                 where=INSTALLED_DIR)

    catalina_home = find_catalina_home()
    create_dir(os.path.join(catalina_home, "webapps", NAME))
    extract_file(os.path.join(INSTALLED_DIR, "opengrok-%s" % OPENGROK_VERSION,
                              "lib", "source.war"),
                 where=os.path.join(catalina_home, "webapps", NAME))

    check_java_installed()

    print("[+] Ready to index the source directory!")
Example #6
def search_jdk():
    if jdk_zip_exists():
        locales.adv_print(f"JDK_ZIP_ALREADY_EXISTS", variables={"zipfile": settings["jdk_zip_name"]})
        utils.extract_file(settings["jdk_zip_name"])
        os.remove(settings["jdk_zip_name"])
    for file in os.listdir():
        jdk_path = os.path.join(os.getcwd(), file)
        if "jdk" in file and not os.path.isfile(jdk_path) and utils.verify_path(Path(jdk_path)):
            extend_path(jdk_path)
            return True
    p = utils._Path(settings["jdk_installation_path"])
    for file in p.listdir():
        jdk_path = os.path.join(p.value, file)
        if "jdk" in file and os.path.isdir(jdk_path) and utils.verify_path(Path(jdk_path)):
            extend_path(os.path.join(p.value, file))
            utils.set_java_home(os.path.join(p.value, file))
            return True
    jdk = os.environ.get("JAVA_HOME")
    return settings["skip_jdk_checks"] or (jdk is not None and utils.verify_path(Path(jdk)))
Example #7
    def __init__(self, args, root, transform=None, is_training=False):
        # Dataset year
        self.year = args.year
        year = args.year
        if args.year == "2007" and not is_training:
            year = "2007-test"

        # Dataset root directory
        self.root = root
        if not os.path.exists(self.root):
            os.makedirs(self.root, exist_ok=True)

        # get the url
        self.url = DATASET_DICT[args.year]['url']
        self.filename = DATASET_DICT[args.year]['filename']
        self.md5 = DATASET_DICT[args.year]['md5']
        self.transforms = transform

        # download file if it is not yet available locally
        if args.download:
            file = download_url(DATASET_DICT[year]['url'], self.root,
                                DATASET_DICT[year]['filename'],
                                DATASET_DICT[year]['md5'])
            extract_file(os.path.join(self.root, file), self.root)

        voc_root = os.path.join(self.root, DATASET_DICT[year]['base_dir'])
        if not os.path.isdir(voc_root):
            raise RuntimeError("Dataset not found/currepted. " \
                               "Check the path/ use download = True option")
        img_dir = os.path.join(voc_root, "JPEGImages")
        seg_dir = os.path.join(voc_root, "SegmentationClass")
        split_dir = os.path.join(voc_root, "ImageSets", "Segmentation")
        if is_training:
            split = os.path.join(split_dir, "train" + ".txt")
        else:
            split = os.path.join(split_dir, "test" + ".txt")

        with open(split) as f:
            item_names = [x.rstrip("\n") for x in f.readlines()]

        self.images = [os.path.join(img_dir, x + ".jpg") for x in item_names]
        self.segmask = [os.path.join(seg_dir, x + ".png") for x in item_names]
        assert (len(self.images) == len(self.segmask))
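
The constructor above only prepares the image and mask path lists; a minimal __len__/__getitem__ sketch, assuming PIL images and a transform that operates jointly on the (image, mask) pair, could be:

    def __len__(self):
        return len(self.images)

    def __getitem__(self, index):
        from PIL import Image
        # load the image and its segmentation mask from the precomputed paths
        img = Image.open(self.images[index]).convert("RGB")
        mask = Image.open(self.segmask[index])
        if self.transforms is not None:
            # assumption: the transform accepts and returns the (image, mask) pair
            img, mask = self.transforms(img, mask)
        return img, mask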
Example #8
def inline_buttons_handler(bot, update):
    from app import app, db

    query = update.callback_query
    chat_id = query.message.chat_id

    logger.debug("Got an inline button action: %s" % query.data)
    bot.send_chat_action(chat_id=chat_id, action=telegram.ChatAction.TYPING)
    # Try to get params
    try:
        params = json.loads(query.data)
        action = params.get("action")
        userfile_id = int(params.get("uf"))
    except Exception as e:
        logger.error(e)
        bot.send_message(
            chat_id=chat_id,
            text="\n".join(
                [
                    "Упс! Что-то пошло не так 😱",
                    "Передайте это администратору, чтобы он все исправил:",
                    "Query data: %s" % query.data,
                    "Exception: %s" % e,
                ]
            ),
        )
        raise

    # Try to get info about file from db
    file_info = get_file_info(bot, userfile_id)
    if action in ACTIONS_MAPPING:
        outfile = os.path.join(
            app.config["PROCESSED_DIR"],
            "%s %s %s.zip"
            % (
                remove_extension(file_info["filename"]),
                file_info["userfile_id"],
                action,
            ),
        )
        bot.send_message(text="Сейчас посмотрю...⏳", chat_id=chat_id)
        try:
            extract_file(bot, chat_id, file_info)
            statuses = ACTIONS_MAPPING[action](file_info["extract_path"])

            if any(statuses.values()):
                zipdir(file_info["extract_path"], outfile)
                bot.send_message(chat_id=chat_id, text="Готово!🚀")
                bot.send_document(
                    chat_id=chat_id,
                    document=open(outfile, "rb"),
                    filename=os.path.basename(outfile),
                    reply_to_message_id=file_info["message_id"],
                )
                if not all(statuses.values()):
                    message = "⚠️ Следующие файлы не удалось обработать: ⚠️\n"
                    for file, status in statuses.items():
                        if not status:
                            file_path = os.path.relpath(
                                file, file_info["extract_path"]
                            )
                            # Telegram has limit for message length, so we
                            # split the message in case it is too long (> 4096)
                            if len(message) + len(file_path) + 10 < 4096:
                                message += f"\n ❌ {file_path}"
                            else:
                                bot.send_message(chat_id=chat_id, text=message)
                                message = f" ❌ {file_path}"
                    bot.send_message(chat_id=chat_id, text=message)
            else:
                bot.send_message(
                    chat_id=chat_id,
                    text="Не удалось обработать данные. Проверьте, что файлы предоставлены в нужном формате.",
                )
        except Exception as e:
            logger.error(e)
            bot.send_message(
                chat_id=chat_id,
                text="\n".join(
                    [
                        "Упс! Что-то пошло не так 😱",
                        "Передайте это администратору, чтобы он все исправил:",
                        "Query data: %s" % query.data,
                        "Exception: %s" % e,
                    ]
                ),
            )
            raise
    else:
        bot.send_message(
            chat_id=chat_id,
            text="Данная команда в процессе реализации и пока не доступна 😞",
        )
    return "OK"
Example #9
original_path = os.path.join(data_dir, config['original_dir'])
clean_path = os.path.join(data_dir, config['clean_dir'])
removed_path = os.path.join(data_dir, config['removed_dir'])
os.makedirs(data_dir, exist_ok=True)

# Download all
os.makedirs(downloads_path, exist_ok=True)
for filename, meta in config['dataset_urls'].items():
    file_path = os.path.join(downloads_path, filename)
    download_file(meta['url'], file_path, meta['md5'])

# Extract all
for filename in config['dataset_urls'].keys():
    if filename.endswith('.zip'):
        file_path = os.path.join(downloads_path, filename)
        extract_file(file_path, original_path)

# Replace incorrect file
for replace_file in config['replace']:
    print('Replacing "%s" with "%s"' %
          (os.path.join(replace_file['dir'], replace_file['filename_from']),
           os.path.join(replace_file['dir'], replace_file['filename_to'])))
    os.remove(
        os.path.join(original_path, replace_file['dir'],
                     replace_file['filename_from']))
    shutil.copyfile(
        os.path.join(downloads_path, replace_file['filename_to']),
        os.path.join(original_path, replace_file['dir'],
                     replace_file['filename_to']))
print('done.')
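
download_file is expected to fetch each URL and check its md5; a short sketch, assuming a plain HTTP download with hashlib for the checksum:

import hashlib
import os
import urllib.request


def download_file(url, file_path, expected_md5=None):
    # hypothetical helper: skip the download when a file with the expected md5 already exists
    if expected_md5 is not None and os.path.exists(file_path):
        with open(file_path, 'rb') as f:
            if hashlib.md5(f.read()).hexdigest() == expected_md5:
                return file_path
    urllib.request.urlretrieve(url, file_path)
    return file_path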
"""
import os
from sklearn.model_selection import train_test_split
from keras.applications.mobilenet_v2 import MobileNetV2
from keras.layers import Dense, Dropout
from keras.models import Model
from keras.callbacks import ModelCheckpoint, ReduceLROnPlateau, EarlyStopping
from utils import generator_batch, extract_file, categorical_crossentropy_label_smoothing, my_categorical_crossentropy_label_smoothing
from keras.callbacks import TensorBoard
import config

os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'

# --------------------------------------- Extract data and split the dataset -----------------------------
img_path, person_id_original_list, nbr_persion_ids = extract_file(config.DATA_FOLDER, data="train")

train_img_path, val_img_path, train_ids, val_ids = train_test_split(img_path, person_id_original_list, test_size=0.2,
                                                                    random_state=2020)
print("numbers of train images:", len(train_img_path))
print("numbers of val images:", len(val_img_path))

# ---------------------------------------backbone------------------------------
weight_path = "/students/julyedu_510477/PersonReID_project/MobileNetV2/mobilenet_v2_weights_tf_dim_ordering_tf_kernels_0.5_96_no_top.h5"

backbone = MobileNetV2(weights=weight_path, input_shape=(config.IMG_HIGHT, config.IMG_WIGTH, 3), include_top=False, alpha=0.5,
                       pooling='max')

# backbone.summary()

gobal_pool = backbone.get_layer(index=-1).output
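
The snippet stops at the pooled backbone output; one way to attach a classification head on top of gobal_pool, assuming nbr_persion_ids is the number of identity classes and substituting plain categorical cross-entropy for the imported label-smoothing loss (whose signature is not shown), would be:

x = Dropout(0.5)(gobal_pool)
predictions = Dense(nbr_persion_ids, activation='softmax')(x)
model = Model(inputs=backbone.input, outputs=predictions)
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])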
Example #11
    return res


#HANDLE = c_void_p #linux and win32
RAROpenArchiveEx = _c_func(unrarlib.RAROpenArchiveEx, c_void_p,
                            [POINTER(RAROpenArchiveDataEx), ],
                            _check_open_result)

#files data
RARReadHeaderEx = _c_func(unrarlib.RARReadHeaderEx, c_int,
                        [c_void_p, POINTER(RARHeaderDataEx)],
                        _check_result)

#Moves the current position in the archive to the next file.
RARProcessFileW = _c_func(unrarlib.RARProcessFileW, c_int,
                        [c_void_p, c_int, c_wchar_p, c_wchar_p],
                        _check_result)

#Close RAR archive and release allocated memory.
RARCloseArchive = _c_func(unrarlib.RARCloseArchive, c_int,
                        [c_void_p, ],
                        _check_result)


if __name__ == "__main__":

    import utils

    passwords = ["algo", "asdasdqwe", "some"]
    utils.extract_file("C:\\Users\\Admin\\Desktop\\evtc.part1.rar", "C:\\Users\\Admin\\Desktop", passwords)
Example #12
def _check_result(res, func, args):
    err_check(res)
    return res


#HANDLE = c_void_p #linux and win32
RAROpenArchiveEx = _c_func(unrarlib.RAROpenArchiveEx, c_void_p, [
    POINTER(RAROpenArchiveDataEx),
], _check_open_result)

#files data
RARReadHeaderEx = _c_func(unrarlib.RARReadHeaderEx, c_int,
                          [c_void_p, POINTER(RARHeaderDataEx)], _check_result)

#Moves the current position in the archive to the next file.
RARProcessFileW = _c_func(unrarlib.RARProcessFileW, c_int,
                          [c_void_p, c_int, c_wchar_p, c_wchar_p],
                          _check_result)

#Close RAR archive and release allocated memory.
RARCloseArchive = _c_func(unrarlib.RARCloseArchive, c_int, [
    c_void_p,
], _check_result)

if __name__ == "__main__":

    import utils

    passwords = ["algo", "asdasdqwe", "some"]
    utils.extract_file("C:\\Users\\Admin\\Desktop\\evtc.part1.rar",
                       "C:\\Users\\Admin\\Desktop", passwords)
Example #13
def update_item(self, item,dispose_dia = True):
    post_ga_event('update_item','artifact_%s' % item.name)
    try:
        if item.create_delta:
            for deltaitem in item.deltas:
                if not utils.check_if_already_updated_with_delta(dir_name, item.name, deltaitem.version):
                    log_dia_info('Updating file %s' % deltaitem.file)
                    retries = 0
                    nof_retries = 3
                    while retries < nof_retries:
                        utils.get_file(utils.resolve_delta_dir(item),utils.get_storage_location(deltaitem.file),deltaitem.file,item.name)
                        dia_tick()

                        if was_dia_cancelled():
                            log_dia_info('Cancelling...')
                            break

                        if item.dynamic_import:
                            utils.delete_folder(utils.user_file_cache_dyn)
                            utils.create_dir(utils.user_file_cache_dyn)
                            try:
                                if was_dia_cancelled():
                                    log_dia_info('Cancelling...')
                                    break

                                utils.extract_file(utils.get_storage_location(deltaitem.file), utils.user_file_cache_dyn,item,True)

                                if was_dia_cancelled():
                                    log_dia_info('Cancelling...')
                                    break

                                dynamic_import.move_in_place(utils.user_file_cache + 'dyn/%s/' % item.folder, '%s/%s/' % (dir_name, item.folder))

                                if was_dia_cancelled():
                                    log_dia_info('Cancelling...')
                                    update_tree_view(self)
                                    break

                                utils.update_db(dir_name, item.name, deltaitem.version)
                                utils.delete_folder(utils.user_file_cache + 'dyn/%s/' % item.folder)
                                item.needs_update = False
                                update_tree_view(self)
                                dia_tick()
                                break

                            except FtpOverloadedException:
                                post_ga_event('update_item_ftp_overload','artifact_%s' % deltaitem.file)
                                raise

                            except InvalidZipFileException as e:
                                post_ga_event('update_item_invalid_zip','artifact_%s' % deltaitem.file)
                                utils.delete_file(utils.get_storage_location(deltaitem.file))
                                if retries == nof_retries-1:
                                    raise

                        else:
                            log_info('Unpacking %s into %s' % (item.name, dir_name))
                            try:
                                if was_dia_cancelled():
                                    log_dia_info('Cancelling...')
                                    break
                                utils.extract_file(utils.get_storage_location(deltaitem.file), dir_name,item,False)
                                if was_dia_cancelled():
                                    log_dia_info('Cancelling...')
                                    update_tree_view(self)
                                    break
                                utils.update_db(dir_name, item.name, deltaitem.version)
                                target_folder = dir_name + '/' + item.folder
                                log_dia_info('Updated %s with deltafile %s at location %s' % (item.name,deltaitem.file,target_folder))
                                item.needs_update = False
                                update_tree_view(self)
                                dia_tick()
                                if utils.get_boolean_user_setting(delete_files_after_install):
                                    utils.delete_file(utils.get_storage_location(deltaitem.file))
                                break

                            except FtpOverloadedException:
                                post_ga_event('update_item_ftp_overload','artifact_%s' % deltaitem.file)
                                raise

                            except InvalidZipFileException:
                                post_ga_event('update_item_invalid_zip','artifact_%s' % deltaitem.file)
                                log_dia_info('Invalid delta zip file, deleting and retrying')
                                utils.delete_file(utils.get_storage_location(deltaitem.file))
                                if retries == nof_retries-1:
                                    raise

                        retries += 1
        if dispose_dia:
            wx.CallAfter(dispose_dialog)

    except FtpOverloadedException:
        if dispose_dia:
            wx.CallAfter(dispose_dialog_fail,'Too many users right now, please try again later')

    except InvalidZipFileException as e:
        if dispose_dia:
            wx.CallAfter(dispose_dialog_fail,e.message)
    except:
        if dispose_dia:
            wx.CallAfter(dispose_dialog_fail,'Unknown error %s:' % sys.exc_info()[0])
    finally:
        update_tree_view(self)
Example #14
def install_item(self, current_item, dispose_dia = True):
    post_ga_event('install_item','artifact_%s' % current_item.name)
    folder = '%s/%s/' % (dir_name, current_item.folder)
    if not utils.check_if_already_updated_with_delta(dir_name, current_item.name, current_item.version) or not os.path.exists(folder):
        try:
            log_dia_info('Getting full entry %s' % current_item.name)
            retries = 0
            nof_retries = 3
            while retries < nof_retries:
                try:
                    utils.get_file(current_item.basedir,utils.get_storage_location(current_item.file),current_item.file,current_item.name)

                    if os.path.exists(folder):
                        log_dia_info('Deleting current folder %s (this may take a while, please be patient)' % folder)
                        utils.delete_folder('%s/%s/' % (dir_name, current_item.folder))

                    if was_dia_cancelled():
                        log_dia_info('Download of %s was cancelled' % current_item.name)
                        if dispose_dia:
                            wx.CallAfter(dispose_dialog)
                        return

                    log_dia_info('Downloaded %s' % current_item.name)
                    log_dia_info('Extracting files into %s' % dir_name)

                    dia_tick()
                    utils.extract_file(utils.get_storage_location(current_item.file), dir_name,current_item,True)
                    dia_tick()
                    break

                except InvalidZipFileException:
                    post_ga_event('install_item_invalid_zip','artifact_%s' % current_item.name)
                    log_info('Invalid zip file, deleting and retrying')
                    utils.delete_file(utils.get_storage_location(current_item.file))
                    if retries == nof_retries-1:
                        raise

                retries+=1

            if utils.get_boolean_user_setting(delete_files_after_install):
                utils.delete_file(utils.get_storage_location(current_item.file))

            if was_dia_cancelled():
                if dispose_dia:
                    wx.CallAfter(dispose_dialog)
                return

            log_dia_info('Update db')
            utils.update_db(dir_name, current_item.name, current_item.version)
            current_item.not_installed = False

            log_dia_info('Done extracting full entry %s at location %s' % (current_item.name, dir_name))
            log_dia_info('Install done')

            if dispose_dia:
                wx.CallAfter(dispose_dialog)

        except InvalidZipFileException as e:
            utils.delete_folder('%s/%s/' % (dir_name, current_item.folder))
            log_dia_info('Install failed due to an error during fetch or unzip')
            if dispose_dia:
                wx.CallAfter(dispose_dialog_fail,e.message)

        except FtpOverloadedException as e:
            log_dia_info('Too many users, please try again in a while')
            if dispose_dia:
                wx.CallAfter(dispose_dialog_fail,'Too many users, please try later')

        except:
            utils.delete_folder('%s/%s/' % (dir_name, current_item.folder))
            log_dia_info('Install failed due to unknown error')
            if dispose_dia:
                wx.CallAfter(dispose_dialog_fail,'Unknown error %s:' % sys.exc_info()[0])
        finally:
            update_tree_view(self)
    else:
        if dispose_dia:
            wx.CallAfter(dispose_dialog)
Example #15
    def handle(self, options, global_options, *args):
        from utils import extract_dirs, pkg, extract_file
        from shutil import copy
        
        if os.path.exists('conf.py'):
            has_conf = True
        else:
            has_conf = False
            
        d = {}
        
        d['project'] = getattr(conf, 'project', 'Parm')
        d['copyright'] = getattr(conf, 'copyright', '2013, Limodou')
        d['version'] = getattr(conf, 'version', __version__)
        d['theme'] = getattr(conf, 'theme', 'semantic')
        d['template_dirs'] = getattr(conf, 'template_dirs', 'templates')
        d['disqus'] = getattr(conf, 'disqus', '')
        
        if has_conf:
            create = get_answer("Create config file", quit='q') == 'Y'
        
        if not has_conf or (has_conf and create):
            d['project'] = get_input("Project name [%s]:"%d['project'], default=d['project'])
            d['copyright'] = get_input("Copyright information [%s]:" % d['copyright'], default=d['copyright'])
            d['version'] = get_input("Version [%s]:" % d['version'], default=d['version'])
            d['theme'] = get_input("Choice theme (bootstrap, semantic) [%s]:" % d['theme'], choices=['bootstrap', 'semantic'], default=d['theme'])
            d['disqus'] = get_input("Disqus account name:", d['disqus'])
            
            if d['theme'] == 'bootstrap':
                d['tag_class'] = """
'table':'table table-bordered',
'pre':'prettyprint linenums',
"""
            elif d['theme'] == 'semantic':
                d['tag_class'] = """
'table':'ui collapsing celled table segment',
'pre':'prettyprint',
"""
            
            conf_file = pkg.resource_filename('parm', 'templates/env/conf.py.txt')
            text = template.template_file(conf_file, d).replace('\r\n', '\n')
            f = open('conf.py', 'wb')
            f.write(text)
            f.close()
        
        run = False
        if os.path.exists(d['template_dirs']):
            print "Template directory [%s] is already existed! If you've changed them, please deal with manually, otherwise the content will be overwritten." % d['template_dirs']
            if get_answer("Overwrite template files") == 'Y':
                run = True
        else:
            run = True
        
        if run:
            print 'Copy %s to ./%s' % ('theme/%s/templates' % d['theme'], d['template_dirs'])
            extract_dirs('parm', 'templates/theme/%s/templates' % d['theme'], 
                d['template_dirs'])
                
        if get_answer("Copy init files [index.md*]") == 'Y':
            for f in ['index.md', 'introduction.md', 'exclude.txt']:
                if os.path.exists(f):
                    print '%s already exists, skipping it' % f
                else:
                    print 'Copy templates/env/%s to ./%s' % (f, f)
                    extract_file('parm', 'templates/env/%s' % f, '.')