Code Example #1
def copy_files(cls,
               scene_nums,
               cam_num,
               src_patterns,
               dst_pattern,
               src_dir,
               dst_dir,
               src2dst_list,
               dst_start_num=None):
    for scene_num in scene_nums:
        for c, pattern in zip(cam_num, src_patterns):
            src_final_format = pattern.replace('([0-9]*)', scene_num)
            src_final_format = src_final_format.replace('\\', '')
            src_image_file, src_txt_file = get_img_label_path(
                src_dir, src_final_format)

            if (dst_start_num != -1):
                dst_final_format = dst_pattern % (cls, c, dst_start_num)
                dst_image_file, dst_txt_file = get_img_label_path(
                    dst_dir, dst_final_format)
            else:
                dst_image_file, dst_txt_file = get_img_label_path(
                    dst_dir, src_final_format)

            src2dst_list.append([src_image_file, dst_image_file])

            copy_file(src_image_file, dst_image_file)
            copy_file(src_txt_file, dst_txt_file)

        if (dst_start_num != -1):
            dst_start_num += 1
Code Example #2
File: main.py Project: zhiyongc/deeprl_network
def train(args):
    base_dir = args.base_dir
    dirs = init_dir(base_dir)
    init_log(dirs['log'])
    config_dir = args.config_dir
    copy_file(config_dir, dirs['data'])
    config = configparser.ConfigParser()
    config.read(config_dir)

    # init env
    env = init_env(config['ENV_CONFIG'])
    logging.info('Training: a dim %r, agent dim: %d' % (env.n_a_ls, env.n_agent))

    # init step counter
    total_step = int(config.getfloat('TRAIN_CONFIG', 'total_step'))
    test_step = int(config.getfloat('TRAIN_CONFIG', 'test_interval'))
    log_step = int(config.getfloat('TRAIN_CONFIG', 'log_interval'))
    global_counter = Counter(total_step, test_step, log_step)

    # init centralized or multi agent
    seed = config.getint('ENV_CONFIG', 'seed')
    model = init_agent(env, config['MODEL_CONFIG'], total_step, seed)

    # disable multi-threading for safe SUMO implementation
    summary_writer = tf.summary.FileWriter(dirs['log'])
    trainer = Trainer(env, model, global_counter, summary_writer, output_path=dirs['data'])
    trainer.run()

    # save model
    final_step = global_counter.cur_step
    logging.info('Training: save final model at step %d ...' % final_step)
    model.save(dirs['model'], final_step)
Code Example #3
def cl_news_util(arguments, cache):
    if not cache:
        htmlfile = utils.get_html_file('http://news.ycombinator.com')
        storylinks = re.findall(r'href="(.+)" class="storylink">(.+)</a><span',
                                htmlfile)
    else:
        storylinks = cache

    if len(arguments) > 1:
        if arguments[1] == '--headlines' or arguments[1] == '-h':
            utils.hn_headlines(storylinks)
            return storylinks

        if arguments[1] == '--open' or arguments[1] == '-o':
            if len(arguments) > 2:
                index = int(arguments[2])
                openpage(storylinks, index)
                return storylinks

        if arguments[1] == '--copy' or arguments[1] == '-cp':
            if len(arguments) > 2:
                utils.copy_file(arguments[2], htmlfile)
                return storylinks

    utils.handle_error('hn_error')
Code Example #4
File: load.py Project: Skarlett/USB_Cryptor
def mount(mountTo):
    try:
        assert path.isdir('/etc/cryptmount/keys/')

    except Exception as e:
        print(
            'You might need to install the actual program and convert this USB to load it.'
        )
        exit()

    key_fp = path.join('/etc/cryptmount/keys/', NAME + '.key')

    try:
        assert path.isfile(key_fp)
    except Exception as e:
        print('No key Found.')
        raise e

    if not path.isdir(mountTo):
        mkdir(mountTo)

    copy_file(CMTAB_FP, path.join(TARGET, 'cmtab.bak'))

    system('echo "%s" > /etc/cryptmount/cmtab' %
           cmtab(mountTo, ENCRYPTED_DEV, NAME, key_fp).make())
    system('chown -R %s:%s %s' % (getlogin(), getlogin(), mountTo))
    system('chmod 0700 %s' % mountTo)

    with open(path.join(TARGET, 'tempSave.tmp'), 'w') as f:
        f.write(mountTo)

    assert system('cryptmount ' + NAME) == 0
    return True
Code Example #5
 def backup_skinshortcuts_properties(propertiesfile, dest_path):
     '''parse skinshortcuts properties file and translate images'''
     # look for any backgrounds and translate them
     propfile = xbmcvfs.File(propertiesfile)
     data = propfile.read()
     propfile.close()
     allprops = eval(data) if data else []
     for count, prop in enumerate(allprops):
         if prop[2] == "background":
             background = prop[3] if prop[3] else ""
             defaultid = prop[1]
             if background.endswith(".jpg") or background.endswith(
                     ".png") or background.endswith(".gif"):
                 background = get_clean_image(background)
                 extension = background.split(".")[-1]
                 newthumb = os.path.join(
                     dest_path, "%s-background-%s.%s" %
                     (xbmc.getSkinDir(), normalize_string(defaultid),
                      extension))
                 newthumb_vfs = "special://profile/addon_data/script.skinshortcuts/%s-background-%s.%s" % (
                     xbmc.getSkinDir(), normalize_string(defaultid),
                     extension)
                 if xbmcvfs.exists(background):
                     copy_file(background, newthumb)
                     allprops[count] = [
                         prop[0], prop[1], prop[2], newthumb_vfs
                     ]
     # write updated properties file
     propfile = xbmcvfs.File(propertiesfile, "w")
     propfile.write(repr(allprops))
     propfile.close()
Code Example #6
def pre_load(source_dic, CACHE_PATH, TMP_PATH, update_types):
    '''
    Check whether icd_name has been updated; if it has, regenerate the cache files and update the tmp files.
    :param source_dic: {LC: national clinical edition, GB: national edition}
    :return:
    '''
    for file_pre, file_name in source_dic.iteritems():
        f1_path = CACHE_PATH + file_pre + '_icd_name.csv'
        f2_path = TMP_PATH + file_pre + '_icd_name_shoushu.csv'

        if not utils.file_compare(f1_path, f2_path):
            # File changed, regenerate the cache
            build_icd_norm(CACHE_PATH + file_pre + "_icd_name.csv",
                           CACHE_PATH + file_pre + "_icd_norm.csv",
                           utils.SERVICE_URL_SS)
            for type, name in update_types.iteritems():
                build_icd_type_norm(
                    CACHE_PATH + file_pre + "_icd_name.csv",
                    CACHE_PATH + file_pre + "_icd_" + type + ".csv", name,
                    file_name)
            build_icd_code_dict(CACHE_PATH + file_pre + "_icd_name.csv",
                                CACHE_PATH + file_pre + "_icdcode_dict.csv",
                                file_name, 2)
            # Update the tmp file
            utils.copy_file(f1_path, f2_path)
Code Example #7
File: backend.py Project: lliurex/lliuwin
 def run_previous_uninstaller(self):
     if not self.info.previous_uninstaller_path \
     or not os.path.isfile(self.info.previous_uninstaller_path):
         return
     previous_uninstaller = self.info.previous_uninstaller_path.lower()
     uninstaller = self.info.previous_uninstaller_path
     command = [uninstaller, "--uninstall"]
     # Propagate noninteractive mode to the uninstaller
     if self.info.non_interactive:
         command.append("--noninteractive")
     if 0 and previous_uninstaller.lower() == self.info.original_exe.lower():
         # This block is disabled as the functionality is achieved via pylauncher
         if self.info.original_exe.lower().startswith(self.info.previous_target_dir.lower()):
             log.debug("Copying uninstaller to a temp directory, so that we can delete the containing directory")
             uninstaller = tempfile.NamedTemporaryFile()
             uninstaller.close()
             uninstaller = uninstaller.name
             copy_file(self.info.previous_uninstaller_path, uninstaller)
         log.info("Launching asynchronously previous uninstaller %s" % uninstaller)
         run_nonblocking_command(command, show_window=True)
         return True
     elif get_file_hash(self.info.original_exe) == get_file_hash(self.info.previous_uninstaller_path):
         log.info("This is the uninstaller running")
     else:
         log.info("Launching previous uninestaller %s" % uninstaller)
         subprocess.call(command)
         # Note: the uninstaller is now non-blocking so we can just as well quit this running version
         # TBD: make this call synchronous by waiting for the child processes of the uninstaller
         self.application.quit()
         return True
Code Example #8
    def backup_skinshortcuts(self, dest_path):
        '''backup skinshortcuts including images'''
        source_path = u'special://profile/addon_data/script.skinshortcuts/'
        if not xbmcvfs.exists(dest_path):
            xbmcvfs.mkdir(dest_path)
        for file in xbmcvfs.listdir(source_path)[1]:
            file = file.decode("utf-8")
            sourcefile = source_path + file
            destfile = dest_path + file
            if xbmc.getCondVisibility(
                    "SubString(Skin.String(skinshortcuts-sharedmenu),false)"):
                # User is not sharing menu, so strip the skin name out of the destination file
                destfile = destfile.replace("%s." % (xbmc.getSkinDir()), "")
            if (file.endswith(".DATA.xml") and
                (not xbmc.getCondVisibility(
                    "SubString(Skin.String(skinshortcuts-sharedmenu),false)")
                 or file.startswith(xbmc.getSkinDir()))):
                xbmcvfs.copy(sourcefile, destfile)
                # parse shortcuts file and look for any images - if found copy them to addon folder
                self.backup_skinshortcuts_images(destfile, dest_path)

            elif file.endswith(".properties") and xbmc.getSkinDir() in file:
                if xbmc.getSkinDir() in file:
                    destfile = dest_path + file.replace(
                        xbmc.getSkinDir(), "SKINPROPERTIES")
                    copy_file(sourcefile, destfile)
                    self.backup_skinshortcuts_properties(destfile, dest_path)
            else:
                # just copy the remaining files
                copy_file(sourcefile, destfile)
Code Example #9
File: backend.py Project: isek/mint4win
 def run_previous_uninstaller(self):
     if not self.info.previous_uninstaller_path \
     or not os.path.isfile(self.info.previous_uninstaller_path):
         return
     previous_uninstaller = self.info.previous_uninstaller_path.lower()
     uninstaller = self.info.previous_uninstaller_path
     if 0 and previous_uninstaller.lower() == self.info.original_exe.lower():
         # This block is disabled as the functionality is achieved via pylauncher
         if self.info.original_exe.lower().startswith(self.info.previous_target_dir.lower()):
             log.debug("Copying uninstaller to a temp directory, so that we can delete the containing directory")
             uninstaller = tempfile.NamedTemporaryFile()
             uninstaller.close()
             uninstaller = uninstaller.name
             copy_file(self.info.previous_uninstaller_path, uninstaller)
         log.info("Launching asynchronously previous uninstaller %s" % uninstaller)
         run_nonblocking_command([uninstaller, "--uninstall"], show_window=True)
         return True
     elif get_file_md5(self.info.original_exe) == get_file_md5(self.info.previous_uninstaller_path):
         log.info("This is the uninstaller running")
     else:
         log.info("Launching previous uninestaller %s" % uninstaller)
         subprocess.call([uninstaller, "--uninstall"])
         # Note: the uninstaller is now non-blocking so we can just as well quit this running version
         # TBD: make this call synchronous by waiting for the child processes of the uninstaller
         self.application.quit()
         return True
Code Example #10
def build_source(source_path, target_path):
    tsconfig_path = join(source_path, "tsconfig.json")

    if isfile(join(source_path, ".includes")):
        params = read_params_from_includes(source_path)
        files = read_files_from_includes(source_path)
    elif not isfile(tsconfig_path):
        params = COMPILER_OPTIONS.copy()
        files = [file for file in glob.glob(
            f"{source_path}/**/*", recursive=True)]
    else:
        # if there isn't .includes but there is tsconfig.json
        result = build_tsconfig(tsconfig_path)
        if result != 0:
            return 1
        with open(tsconfig_path) as tsconfig:
            config = json.load(tsconfig)
            library_path = normpath(
                join(source_path, config["compilerOptions"]["outFile"]))

            copy_file(library_path, target_path)
            declaration_path = f"{splitext(library_path)[0]}.d.ts"

            if(isfile(declaration_path)):
                copy_file(declaration_path, join(make_config.get_path(
                    "toolchain/build/typescript-headers"), basename(declaration_path)))
        return 0

    # decode params
    params["checkJs"] = not params.pop("nocheck")
    params["declaration"] = params.pop("declarations")
    params["experimentalDecorators"] = params.pop("decorators")

    # actually there are two directories with *.d.ts files: toolchain/jslibs (for default headers) & toolchain/build/typescript-headers (for additional libraries)
    headers = glob.glob(relpath(make_config.get_path(
        "toolchain/**/*.d.ts"), source_path), recursive=True)

    template = {
        "compilerOptions": {
            "target": "ES5",
            "lib": ["ESNext"],
            "allowJs": True,
            "downlevelIteration": True,
            "outFile": target_path
        },
        "exclude": [
            "**/node_modules/*",
            "dom"
        ],
        "include": files,
        "files": headers
    }

    for key, value in params.items():
        template["compilerOptions"][key] = value

    with open(tsconfig_path, "w") as tsconfig:
        json.dump(template, tsconfig, indent="\t")

    return build_tsconfig(tsconfig_path)
Code Example #11
File: models.py Project: spaun299/mistofm-1
 def create(self, session):
     ices_tmp_conf = None
     try:
         ices_conf_name = '%s_ices.xml' % self.id
         ices_tmp_conf = copy_file(config.ICES_BASE_CONFIG_PATH,
                                   config.TMP_FOLDER, ices_conf_name)
         self.fill_ices_config(ices_tmp_conf)
         ices_conf_perm_path = config.ICES_CONFIGS_PATH + ices_conf_name
         move_file(ices_tmp_conf, ices_conf_perm_path)
         copy_file(config.ICES_PYTHON_BASE_MODULE_PATH,
                   config.ICES_PYTHON_MODULES_PATH,
                   "playlist_%s.py" % self.id)
         session.commit()
         if self.active:
             self.start_ices()
             if not self.running:
                 msg = "Ices station was saved and configured, " \
                       "but can't run. Please see logs"
                 flash(msg)
                 raise IcesException(msg)
     except Exception as e:
         session.rollback()
         try:
             # Delete all created files if something went wrong
             self.delete_ices_from_file_system(ices_tmp_conf)
         except OSError:
             pass
         finally:
             raise Exception(e)
Code Example #12
    def build(self, target_path):
        temp_path = join(temp_directory, basename(target_path))

        result = 0
        self.create_tsconfig(temp_path)
        if storage.is_path_changed(self.directory):
            import datetime

            print(
                f"building {basename(target_path)} from {self.includes_file}")

            start_time = datetime.datetime.now()
            result = self.build_source(temp_path)
            end_time = datetime.datetime.now()
            diff = end_time - start_time

            print(
                f"completed {basename(target_path)} build in {round(diff.total_seconds(), 2)}s with result {result} - {'OK' if result == 0 else 'ERROR'}"
            )
            if result != 0:
                return result
            storage.save()
        else:
            print(f"{basename(target_path)} is not changed")
        copy_file(temp_path, target_path)

        return result
Code Example #13
File: handlers.py Project: maiduo/maiduo_web
    def _storage_image(self, image, id, path, width):
        if not image:
            return
        ids = (id % 1000, id)
        origin_path = join("user", path, "%d/%d.jpg" % ids)
        media_root = settings.MEDIA_ROOT
        try:
            os.makedirs(dirname(join(media_root, origin_path)))
        except OSError:
            pass

        origin_fd = file(os.path.join(media_root, origin_path), "w+")

        utils.copy_file(image, origin_fd)
        origin_fd.seek(0)
        original = PIL.Image.open(origin_fd)
        w, h = original.size
        if not "JPEG" == original.format:
            original.save(origin_fd.name, "JPEG")
        origin_fd.close()

        for i in width:
            self._storage_image_thumbnail(image, id, path, i)
        image.close()
        origin_fd.close()
Code Example #14
def main(args):
    for failure in utils.find_all_failures(args.design):
        print "Failure", failure
        ordered_suspectz = []
        for i in range(1, 100):
            log_file = failure + args.suffix + ".vennsawork/logs/abr%s/vdb.log" % (
                str(i).zfill(3))
            # print log_file
            ordered_suspectz_file = parse_suspects_from_log(log_file)
            ordered_suspectz = merge_suspect_lists(ordered_suspectz,
                                                   ordered_suspectz_file)

        log_file = failure + args.suffix + ".vennsawork/logs/vdb/vdb.log"
        ordered_suspectz_file = parse_suspects_from_log(log_file)
        ordered_suspectz = merge_suspect_lists(ordered_suspectz,
                                               ordered_suspectz_file)
        print "Number of suspects found by solver:", len(ordered_suspectz)

        # add suspects from vennsa.stdb.gz of base debugging instance in case some are missed.
        stdb_suspects = utils.parse_suspects(failure)
        print "Suspects in %s.vennsawork/vennsa.stdb.gz: %i" % (
            failure, len(stdb_suspects))
        merge_suspect_lists(ordered_suspectz, stdb_suspects)
        print "Total number of suspects:", len(ordered_suspectz)

        with open("suspects.txt", "w") as f:
            for s in ordered_suspectz:
                f.write(s + "\n")
        utils.copy_file("suspects.txt",
                        failure.replace("designs", "suspect_lists") +
                        "_suspects.txt",
                        verbose=False)
Code Example #15
File: data_loader.py Project: nschaefe/Car2Car
    def preprocess(self):
        """Preprocess the attribute file."""

        file_name_list = os.listdir(self.image_dir)
        random.seed(1234)
        random.shuffle(file_name_list)

        for i, d in enumerate(self.domains):
            self.attr2idx[d] = i

        for i, file_name in enumerate(file_name_list):
            if (file_name.startswith('X_')):
                continue

            parts = file_name.split("-")
            label = int(parts[0])
            if label not in self.domains:
                continue
            img_name = file_name

            count = self.get_sample_count(label)
            if count < self.valid_set_size:
                # create holdout set on the fly
                utils.copy_file(self.image_dir, self.valid_set_dir, img_name)
            else:
                self.dataset.append([img_name, self.attr2idx[label]])

            self.increment_sample_count(label)

        print("Sample count per domain: " + str(self.sample_count) +
              " (including holdout set, holdout size per domain is: " +
              str(self.valid_set_size) + ")")
        print('Finished preprocessing the dataset...')
Code Example #16
def pre_load(source_dic):
    '''
    Check whether icd_name has been updated; if it has, regenerate the cache files and update the tmp files.
    :param source_dic: {LC: national clinical edition, GB: national edition}
    :return:
    '''
    for file_pre, file_name in source_dic.iteritems():
        f1_path = CACHE_PATH + file_pre + '_icd_name.csv'
        f2_path = TMP_PATH + file_pre + '_icd_name.csv'

        if not utils.file_compare(f1_path, f2_path):
            # File changed, regenerate the cache
            build_icd_norm(CACHE_PATH + file_pre + "_icd_name.csv",
                           CACHE_PATH + file_pre + "_icd_norm.csv")
            for type, name in {
                    "region": "部位",
                    "core": "中心词",
                    "type": "特征词",
                    "others": "其他",
                    "unknown": "未知"
            }.iteritems():
                build_icd_type_norm(
                    CACHE_PATH + file_pre + "_icd_name.csv",
                    CACHE_PATH + file_pre + "_icd_" + type + ".csv", name,
                    file_name)
            build_icd_code_dict(CACHE_PATH + file_pre + "_icd_name.csv",
                                CACHE_PATH + file_pre + "_icdcode_dict.csv",
                                file_name)
            # Update the tmp file
            utils.copy_file(f1_path, f2_path)
Code Example #17
def build_script(source, target):
    if os.path.isfile(source):
        ensure_file_dir(target)
        copy_file(source, target)
        return 0
    else:
        if os.path.isfile(os.path.join(source, ".includes")):
            return build_includes_dir(source, target)
Code Example #18
def test_copy_file(shell_mock, level_mock):
    level_mock.return_value = log.DEBUG
    fle = "asdf"
    dest = "wtf"
    utils.copy_file(fle, dest)
    shell_mock.assert_called_with(
        ['rsync', '-a', '-vv', fle, dest]
    )
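
The test above suggests that, in this project, utils.copy_file shells out to rsync -a and appends -vv only when the effective log level is DEBUG. Below is a minimal sketch of that behavior using standard-library calls in place of the project's mocked shell/log helpers; it is an illustration under those assumptions, not the project's actual implementation.

import logging
import subprocess

def copy_file(source, dest):
    # Build an rsync command and add verbose output only at DEBUG level,
    # mirroring what the mocked test asserts: ['rsync', '-a', '-vv', src, dest].
    cmd = ['rsync', '-a']
    if logging.getLogger().getEffectiveLevel() == logging.DEBUG:
        cmd.append('-vv')
    cmd.extend([source, dest])
    subprocess.check_call(cmd)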
Code Example #19
File: tests.py Project: maiduo/maiduo_web
    def test_copy_file(self):
        import tempfile
        dest = tempfile.NamedTemporaryFile("w")
        src  = file("resources/10x10.png", "r")
        
        utils.copy_file(src, dest)
        self.assertEquals(os.stat(src.name).st_size, os.stat(dest.name).st_size)

        dest.close()
        src.close()
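
Here utils.copy_file is exercised with open file objects rather than paths (the same project's handler in Code Example #13 uses it the same way). A minimal sketch of a file-object-based copy under that assumption, built on shutil.copyfileobj; this is illustrative, not the project's actual implementation.

import shutil

def copy_file(src, dest):
    # Stream the contents of one open file object into another and flush,
    # so both files report the same size, as the test above checks.
    shutil.copyfileobj(src, dest)
    dest.flush()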
Code Example #20
def copy_supporting_files(start_path, destination):
    for file in list_files(start_path):
        if not (file.startswith("_") or file.startswith(".")):
            print("copying: %s to: %s" % (file, destination))
            copy_file(path.join(start_path, file), path.join(destination, file))

    for dir in list_dirs(start_path):
        if not (dir.startswith("_") or dir.startswith(".")):
            print("copying: %s to: %s" % (dir, destination))
            copy_tree(path.join(start_path, dir), path.join(destination, dir))
Code Example #21
def restore():
    log.info('Restoring app store preferences (.plist)...')
    source = get_app_store_preferences_backup_dir()
    dest = get_app_store_preferences_dir()
    for f in listdir(source):
        domain = f.split('.plist')[0]
        dest_path = path.join(dest, domain, 'Data/Library/Preferences')
        ensure_exists(dest_path)
        source_file = path.join(source, f)
        copy_file(source_file, dest_path)
Code Example #22
 def copy_eggs(source_dir):
     log.debug("Copying eggs from '%s'..." % source_dir)
     for name in os.listdir(source_dir):
         if name.endswith('.egg'):
             if PLATFORM_SUBDIR == 'linux' and name.startswith(
                     'setuptools-0.6c11'):
                 continue
             else:
                 utils.copy_file(source_path=os.path.join(source_dir, name),
                                 target_path=TEMP_LIB_EGG_DIR)
Code Example #23
def perform_build(target_dir, plugin_package, install_type, python_version,
                  python_executable):
    log.info("Target directory:  %s" % target_dir)
    log.info("Plug-in package:   %r" % plugin_package)
    log.info("Python version:    %s" % python_version)
    log.info("Python executable: %s" % python_executable)

    # Retrieve dependencies to the correct location
    retrieve_dep_eggs(plugin_package)

    # Install the dependencies locally using either local copies or downloading from PyPi
    deps = ['Jinja2', 'lxml']
    for dep in deps:
        command = [
            'easy_install-%s' % python_version,
            '--find-links install-temp/dep-eggs'
        ]

        command.append(dep)
        command = ' '.join(command)
        log.debug(command)
        ok = utils.run_command(command)
        if not ok:
            print "Warning: failed to run easy_install to install %s." % dep

    # Find paths to the sources to install
    source_paths = find_cone_egg_sources(plugin_package)

    log.info("Creating install directory...")
    if not os.path.exists(target_dir):
        os.makedirs(target_dir)

    if install_type == 'build':
        build_cone_eggs(source_paths, python_executable)
    if install_type == 'install':
        build_cone_eggs(source_paths, python_executable)
        install_cone_eggs(target_dir, python_version)
    else:
        develop_install_cone_sources(source_paths, target_dir, python_version,
                                     python_executable)

    # Copy RELEASE.txt
    utils.copy_file(source_path=os.path.join(SOURCE_ROOT, '..', 'RELEASE.TXT'),
                    target_path=os.path.join(target_dir, INSTALL_SUBDIR,
                                             'RELEASE.TXT'))

    # Copy cone.cmd or cone.sh, depending on the platform
    if sys.platform == "win32":
        sourcefile = targetfile = "cone.cmd"
    else:
        sourcefile = "cone.sh"
        targetfile = "cone"
    log.info("Copying %s" % sourcefile)
    utils.copy_file(source_path=os.path.join(SOURCE_ROOT, sourcefile),
                    target_path=os.path.join(target_dir, targetfile))
Code Example #24
def copy_files(num_list, cam_num, list_format, src_dir, dst_dir):
    for num in num_list:
        for c in cam_num:
            final_format = list_format % (c, num)
            image_file = src_dir + final_format
            txt_file = image_file.replace('.jpg', '.txt')
            txt_file = txt_file.replace('images', 'labels')
            print(image_file)
            print(txt_file)
            copy_file(image_file, dst_dir)
            copy_file(txt_file, dst_dir)
Code Example #25
    def build(self, target_path):
        temp_path = declarations_name = join(temp_directory,
                                             basename(target_path))

        result = 0
        self.create_tsconfig(temp_path)
        if self.is_source_changed(temp_path):
            print(f"building {basename(target_path)}")
            result = self.build_source(temp_path)
        else:
            print(f"{basename(target_path)} is not changed")
        copy_file(temp_path, target_path)
        return result
Code Example #26
def task_build_info():
	import json
	config = get_make_config()
	out_dir = os.path.join("output/debug", config.get_mod_dir())
	with open(config.get_path(os.path.join(out_dir, "mod.info")), "w") as info_file:
		info = dict(config.get_value("global.info", fallback={"name": "No was provided"}))
		if "icon" in info:
			del info["icon"]
		info_file.write(json.dumps(info, indent=" " * 4))
	icon_path = config.get_value("global.info.icon")
	if icon_path is not None:
		copy_file(config.get_path(icon_path, True), config.get_path(os.path.join(out_dir, "mod_icon.png")))
	return 0
Code Example #27
 def run(self):
     """The actual thread code"""
     self._running = True
     self._started = True
     self._log.debug('Starting %s ' % self.name)
     
     while True:
         # Assume USB flash drive is powered up and mounted.
         # If necessary, create the USB flash destination dir.
         dest_dir = self._get_dest_dir(self._dest_file_name)
         utils.make_dirs(dest_dir, self._log)
         comp_ratio = 1.0
         if self._compress:
             compressed_path = self._src_path + '.gz'
             if not self._compress_file(self._src_path, compressed_path):
                 self._log.error('Error: file compression failed on %s' % \
                                     self._src_path)
                 utils.delete_file(self._src_path, self._log)
                 utils.delete_file(compressed_path, self._log)
                 return False
             orig_bytes = float(utils.get_file_size(self._src_path))
             comp_bytes = float(utils.get_file_size(compressed_path))
             comp_ratio = orig_bytes/comp_bytes
             from_path = compressed_path
             to_path = ''.join((dest_dir, '/', self._dest_file_name, '.gz'))
         else:
             from_path = self._src_path
             to_path = ''.join((dest_dir, '/', self._dest_file_name))
         self._log.info('Storing %s' % to_path)
         self._log.info('  Compression ratio was %.2f to 1' % comp_ratio)
         try:
             utils.copy_file(from_path, to_path, self._log)
         except Exception:
             self._log.info('Error: write to %s failed' % to_path)
         # Delete the temp files
         try:
             utils.delete_file(self._src_path, self._log)
         except Exception:
             self._log.info('Error: could not delete %s' % self._src_path)
         if self._compress:
             try:
                 utils.delete_file(compressed_path, self._log)
             except Exception:
                 self._log.error('Could not delete %s' % compressed_path)
         break
         
     self._running = False
     if self._exit_callback:
         self._exit_callback(self)
     self._log.debug('Exiting %s ' % self.name)
Code Example #28
    def restore(self, filename="", silent=False):
        '''restore skin settings from file'''

        if not filename:
            filename = self.get_restorefilename()

        progressdialog = None
        if not silent:
            progressdialog = xbmcgui.DialogProgress(
                self.addon.getLocalizedString(32006))
            progressdialog.create(self.addon.getLocalizedString(32007))

        if filename and xbmcvfs.exists(filename):
            # create temp path
            temp_path = self.create_temp()
            if not filename.endswith("zip"):
                # assume that passed filename is actually a skinsettings file
                skinsettingsfile = filename
            else:
                # copy zip to temp directory and unzip
                skinsettingsfile = temp_path + "guisettings.txt"
                if progressdialog:
                    progressdialog.update(0, "unpacking backup...")
                zip_temp = u'%sskinbackup-%s.zip' % (
                    ADDON_DATA, datetime.now().strftime('%Y-%m-%d-%H-%M'))
                copy_file(filename, zip_temp, True)
                unzip_fromfile(zip_temp, temp_path)
                delete_file(zip_temp)
                # copy skinshortcuts preferences
                self.restore_skinshortcuts(temp_path)
                # restore any custom skin images or themes
                for directory in ["custom_images/", "themes/"]:
                    custom_images_folder = u"special://profile/addon_data/%s/%s" % (
                        xbmc.getSkinDir(), directory)
                    custom_images_folder_temp = temp_path + directory
                    if xbmcvfs.exists(custom_images_folder_temp):
                        for file in xbmcvfs.listdir(
                                custom_images_folder_temp)[1]:
                            xbmcvfs.copy(custom_images_folder_temp + file,
                                         custom_images_folder + file)
            # restore guisettings
            if xbmcvfs.exists(skinsettingsfile):
                self.restore_guisettings(skinsettingsfile, progressdialog)

            # cleanup temp
            recursive_delete_dir(temp_path)
            progressdialog.close()
        if not silent:
            xbmcgui.Dialog().ok(self.addon.getLocalizedString(32006),
                                self.addon.getLocalizedString(32009))
Code Example #29
def copy_additionals(source, destination):
    global root_files

    files = os.listdir(source)
    for f in files:
        if f in root_files:
            continue
        src = os.path.join(source, f)
        dest = os.path.join(destination, "src", "assets", "root")

        if (os.path.isfile(src)):
            copy_file(src, os.path.join(dest, f))
        elif (os.path.isdir(src)):
            copy_file(src, os.path.join(dest, f))
Code Example #30
def task_build_scripts():
    import json
    config = get_make_config()
    with open(config.get_path("output/mod.info"), "w") as info_file:
        info = dict(config.get_value("global.info",
                                     fallback={"name": "No was provided"}))
        if "icon" in info:
            del info["icon"]
        info_file.write(json.dumps(info, indent=" " * 4))
    icon_path = config.get_value("global.info.icon")
    if icon_path is not None:
        copy_file(config.get_path(icon_path),
                  config.get_path("output/mod_icon.png"))
    return 0
Code Example #31
def build_snpeff_db(reference, gff, snpeff_config, snpeff_db):
    """
    build SnpEff database for a reference genome
    
    :param snpeff_config:
    :param snpeff_db:
    :param reference:
    :param gff:
    :return:
    """

    # locate the executable
    snpeff = find_executable(['snpEff'])

    snpeff_db = os.path.abspath(snpeff_db)

    # create SnpEff database
    prefix = os.path.join(os.path.abspath(os.path.dirname(reference)),
                          os.path.splitext(os.path.basename(reference))[0])
    index_base = os.path.basename(prefix)
    snpeff_data_dir = os.path.join(snpeff_db, 'data')
    snpeff_genes_dir = os.path.join(snpeff_data_dir, index_base)
    mkdir(snpeff_data_dir)
    mkdir(snpeff_genes_dir)

    # copy the files
    copy_file(src=gff, dest=os.path.join(snpeff_genes_dir, 'genes.gff'))
    copy_file(src=reference,
              dest=os.path.join(snpeff_genes_dir, 'sequences.fa'))

    # Add a genome to the configuration file
    snpeff_config = os.path.join(snpeff_db, 'snpeff.config')
    with open(snpeff_config, 'w') as f_obj:
        f_obj.write('{}.genome : {}\n'.format(index_base, index_base))

    # check if db exists and build if not
    db_bin = os.path.join(snpeff_genes_dir, 'snpEffectPredictor.bin')
    if os.path.exists(db_bin):
        logging.critical("SnpEff database exist for {}".format(index_base))
    else:
        # build db
        call = [
            "{} build -config {} -dataDir {} -gff3 -v {}".format(
                snpeff, snpeff_config, snpeff_data_dir, index_base)
        ]
        cmd = " ".join(call)
        logging.info("building SnpEFF database: {}".format(gff))
        run_shell_command(cmd=cmd, raise_errors=False, extra_env=None)
    return snpeff_config, snpeff_data_dir
Code Example #32
 def restore_skinshortcuts(temp_path):
     '''restore skinshortcuts files'''
     source_path = temp_path + u"skinshortcuts/"
     if xbmcvfs.exists(source_path):
         dest_path = u'special://profile/addon_data/script.skinshortcuts/'
         for filename in xbmcvfs.listdir(source_path)[1]:
             filename = filename.decode("utf-8")
             sourcefile = source_path + filename
             destfile = dest_path + filename
             if filename == "SKINPROPERTIES.properties":
                 destfile = dest_path + filename.replace(
                     "SKINPROPERTIES", xbmc.getSkinDir())
             elif xbmc.getCondVisibility(
                     "SubString(Skin.String(skinshortcuts-sharedmenu),false)"
             ):
                 destfile = "%s-" % (xbmc.getSkinDir())
             copy_file(sourcefile, destfile)
Code Example #33
    def test(self, testing_data_dir, result_dir, test_saver, n_cores, num_test=False):
        print('Start Testing')
        tmp_list = search_wav(testing_data_dir)

        if num_test:
            test_list = np.random.choice(tmp_list, num_test)
        else:
            test_list = tmp_list

        print('All testing data number:', len(test_list))
        REG_dir = join(result_dir, 'REG')
        Noisy_write_dir = join(result_dir, 'Source')
        Clean_write_dir = join(result_dir, 'Target')

        if not os.path.exists(result_dir):
            os.makedirs(result_dir)
            os.makedirs(REG_dir)
            os.makedirs(Noisy_write_dir)
            os.makedirs(Clean_write_dir)
        with tf.Session(config=self.config) as sess:
            self.saver.restore(sess=sess, save_path=test_saver)
            for file in tqdm(test_list):
                hop_length = 256
                file_name = file.split('/')[-1]
                try:
                    snr, noise_name, clean_name1, clean_neme2 = file.split(
                        '/')[-1].split('_')
                    clean_file = join(testing_data_dir, '_'.join(
                        ['0dB', 'n0', clean_name1, clean_neme2]))
                    noisy_file = file
                except:
                    snr, noise_name, clean_name = file.split(
                        '/')[-1].split('_')
                noisy_file = join(testing_data_dir, file_name)
                REG_file = join(REG_dir, file_name)
                Noisy_file = join(Noisy_write_dir, file_name)
                Clean_file = join(Clean_write_dir, file_name)

                X_in_seq = wav2spec(noisy_file, sr=16000,
                                    forward_backward=True, SEQUENCE=False, norm=True, hop_length=hop_length)
                re_reg = sess.run([self.reg_layer],
                                  feed_dict={self.x_noisy: X_in_seq})[:][0]
                spec2wav(noisy_file, 16000, REG_file,
                         re_reg, hop_length=hop_length)
                copy_file(noisy_file, Noisy_file)
                copy_file(clean_file, Clean_file)
Code Example #34
File: meg_statistics.py Project: ofek-schechner/mmvt
def calc_labels_avg(events_id, tmin, inverse_method='dSPM', do_plot=False):
    d = np.load(op.join(LOCAL_ROOT_DIR, 'fsaverage_labels_indices.npz'))
    labels_vertices, labels_names = d['labels_vertices'], d['labels_names']

    if do_plot:
        plt.close('all')
        plt.figure()

    res_fol = op.join(LOCAL_ROOT_DIR, 'permutation_ttest_results')
    for cond_id, cond_name in enumerate(events_id.keys()):
        for patient in get_patients():
            blender_fol = blender_template.format(patient=patient, cond_name=cond_name)
            results_file_name = op.join(res_fol, '{}_{}_{}.npz'.format(patient, cond_name, inverse_method))
            clusters_file_name = op.join(res_fol, '{}_{}_{}_clusters.npy'.format(patient, cond_name, inverse_method))
            if op.isfile(results_file_name) and op.isfile(clusters_file_name):
                print('{}, {}'.format(patient, cond_name))
                clusters = np.load(clusters_file_name)
                ttest_res = np.load(results_file_name)
                for data, data_name in zip([ttest_res['T_obs'].T, clusters], ['ttest_res', 'clusters']):
                    fsave_vertices = utils.fsaverage_vertices()
                    stc = _make_stc(data, fsave_vertices, tmin=tmin, tstep=tstep, subject='fsaverage')
                    labels_data = np.zeros((len(labels_vertices), stc.data.shape[1], 2))
                    for ind, (vertidx, label_name) in enumerate(zip(labels_vertices, labels_names)):
                        if vertidx is not None:
                            labels_data[ind] = utils.get_max_min(stc.data[vertidx, :])
                        if do_plot:
                            plt.plot(labels_data[ind, :, 0], label='{} p<c'.format(label_name))
                            plt.plot(labels_data[ind, :, 1], label='{} p>c'.format(label_name))
                    for hemi in ['rh', 'lh']:
                        indices = [ind for ind, l in enumerate(labels_names) if hemi in l]
                        labels = [str(l) for l in labels_names if hemi in l]
                        np.savez(op.join(blender_fol, 'labels_data_{}_{}.npz'.format(hemi, data_name)),
                            data=labels_data[indices], names=labels, conditions=['p<c', 'p>c'])

                    if do_plot:
                        plt.legend()
                        plt.xlabel('time (ms)')
                        plt.title('{} {}'.format(patient, cond_name))
                        plt.show()
                        print('show!')

                # Make clusters to be the default files for blender
                for hemi in ['rh', 'lh']:
                    utils.copy_file(op.join(blender_fol, 'labels_data_{}_clusters.npz'.format(hemi)),
                                    op.join(blender_fol, 'labels_data_{}.npz'.format(hemi)))
Code Example #35
File: djangodeployer.py Project: viep/cmdbac
    def deploy_repo_attempt(self, deploy_path):
        LOG.info(utils.configure_env(self.base_path))

        manage_files = utils.search_file(deploy_path, 'manage.py')
        if not manage_files:
            return ATTEMPT_STATUS_MISSING_REQUIRED_FILES
        manage_paths = [os.path.dirname(manage_file) for manage_file in manage_files]
        base_dir = next(name for name in manage_paths if 'lib/python2.7/site-packages/' not in name)
        manage_path = next(name for name in manage_paths if name.startswith(base_dir))
        LOG.info('manage.py path: {}'.format(manage_path))

        with open(os.path.join(manage_path, 'manage.py'), 'r') as manage_file:
            s = re.search('os.environ.setdefault\("DJANGO_SETTINGS_MODULE", "(.*)"\)', manage_file.read())
            if s:
                setting_path = s.group(1)
            else:
                return ATTEMPT_STATUS_MISSING_REQUIRED_FILES

        setting_path = setting_path.replace('.', '/')
        if os.path.isdir(os.path.join(manage_path, setting_path)):
            setting_path = os.path.join(manage_path, setting_path)
            for setting_file in sorted(os.listdir(setting_path)):
                if os.path.isfile(os.path.join(setting_path, setting_file)):
                    setting_path = os.path.join(setting_path, setting_file)
                    break
            self.setting_path = setting_path
        elif os.path.isfile(os.path.join(manage_path, setting_path + '.py')):
            setting_path = os.path.join(manage_path, setting_path + '.py')
            self.setting_path = setting_path
        else:
            for candidate_setting_files in utils.search_file_regex(deploy_path, '^settings.*\.py$'):
                setting_path = os.path.join(manage_path, setting_path + '.py')
                utils.copy_file(candidate_setting_files, setting_path)
                self.setting_path = setting_path
                break
        if self.setting_path == None:
            return ATTEMPT_STATUS_MISSING_REQUIRED_FILES
        LOG.info('setting.py path: {}'.format(setting_path))

        requirement_files = utils.search_file(deploy_path, 'requirements.txt')
        if requirement_files:
            LOG.info('requirements.txt path: {}'.format(requirement_files))
        
        return self.try_deploy(manage_path, requirement_files)
Code Example #36
File: backend.py Project: Ando02/wubiuefi
    def create_preseed_diskimage(self):
        source = join_path(self.info.data_dir, 'preseed.disk')
        template = read_file(source)
        password = md5_password(self.info.password)
        dic = dict(
            timezone = self.info.timezone,
            password = password,
            keyboard_variant = self.info.keyboard_variant,
            keyboard_layout = self.info.keyboard_layout,
            locale = self.info.locale,
            user_full_name = self.info.user_full_name,
            username = self.info.username)
        for k,v in dic.items():
            k = "$(%s)" % k
            template = template.replace(k, v)
        preseed_file = join_path(self.info.install_dir, "preseed.cfg")
        write_file(preseed_file, template)

        source = join_path(self.info.data_dir, "wubildr-disk.cfg")
        target = join_path(self.info.install_dir, "wubildr-disk.cfg")
        copy_file(source, target)
Code Example #37
    def test_6_extensions_directory_autoload(self):
        utils.copy_file(test_base.ARGS.build + "/osquery/example_extension.ext",
            test_base.CONFIG_DIR)
        loader = test_base.Autoloader([test_base.CONFIG_DIR])
        daemon = self._run_daemon({
            "disable_watchdog": True,
            "extensions_timeout": EXTENSION_TIMEOUT,
            "extensions_autoload": loader.path,
        })
        self.assertTrue(daemon.isAlive())

        # Get a python-based thrift client
        client = test_base.EXClient(daemon.options["extensions_socket"])
        self.assertTrue(client.open(timeout=EXTENSION_TIMEOUT))
        em = client.getEM()

        # The waiting extension should have connected to the daemon.
        result = test_base.expect(em.extensions, 1)
        self.assertEqual(len(result), 1)

        client.close()
        daemon.kill(True)
Code Example #38
File: generator.py Project: shaunstanislaus/tags
def build_files(root=u'.', dest=u'_site', pattern=u'**/*.html', 
                exclude=u'_*/**', watch=False, force=False):
    try:
        os.stat(os.path.join(root, 'index.html'))
    except OSError:
        if not force:
            msg = "Oops, we can't find an index.html in the source folder.\n"+\
                  "If you want to build this folder anyway, use the --force\n"+\
                  "option."
            print(msg)
            sys.exit(1)

    print("Building site from '{0}' into '{1}'".format(root, dest))

    exclude = exclude or os.path.join(dest, u'**')
    for filename in utils.walk_folder(root or '.'):
        included = utils.matches_pattern(pattern, filename)
        excluded = utils.matches_pattern(exclude, filename)
        destfile = os.path.join(dest, filename)
        if included and not excluded: 
            build_file(filename, destfile, root=root)
        elif not excluded:
            filepath = os.path.join(root, filename)
            destpath = os.path.join(dest, filename)
            utils.copy_file(filepath, destpath)

    if watch:
        observer = _watch(root=root,
                          dest=dest,
                          pattern=pattern,
                          exclude=exclude)
        if not observer:
            return
        try:
            while True:
                time.sleep(1)
        except KeyboardInterrupt:
            observer.stop()
        observer.join()
Code Example #39
File: rordeployer.py Project: viep/cmdbac
    def deploy_repo_attempt(self, deploy_path):
        rake_files = utils.search_file(deploy_path, 'Rakefile')
        if not rake_files:
            LOG.error('No rakefile found!')
            return ATTEMPT_STATUS_MISSING_REQUIRED_FILES
        rakefile_paths = [os.path.dirname(rake_file) for rake_file in rake_files]

        gem_files = utils.search_file(deploy_path, 'Gemfile')
        if not gem_files:
            LOG.error('No gemfile found!')
            return ATTEMPT_STATUS_MISSING_REQUIRED_FILES
        gemfile_paths = [os.path.dirname(gem_file) for gem_file in gem_files]

        base_dirs = sorted(set.intersection(set(rakefile_paths), set(gemfile_paths)))
        if not base_dirs:
            LOG.error('Can not find base directory!')
            return ATTEMPT_STATUS_MISSING_REQUIRED_FILES
        base_dir = next(iter(base_dirs))

        config_files = utils.search_file_regex(os.path.join(base_dir, 'config'), '.*yml.*')
        for config_file in config_files:
            if '.example' in config_file:
                new_config_file = config_file.replace('.example', '')
                utils.copy_file(config_file, new_config_file)
            elif '/example' in config_file:
                new_config_file = config_file.replace('/example', '')
                utils.copy_file(config_file, new_config_file)
            elif '-sample' in config_file:
                new_config_file = config_file.replace('-sample', '')
                utils.copy_file(config_file, new_config_file)
            elif '.tmpl' in config_file:
                new_config_file = config_file.replace('.tmpl', '')
                utils.copy_file(config_file, new_config_file)

        self.setting_path = base_dir

        return self.try_deploy(base_dir)
Code Example #40
 def handle_file(self, source_path, dest_path):
     copy_file(source_path, dest_path)
     print 'Copied {0} to {1}'.format(source_path, dest_path)        
Code Example #41
File: backend.py Project: isek/mint4win
 def create_preseed_cdboot(self):
     source = join_path(self.info.data_dir, 'preseed.cdboot')
     target = join_path(self.info.custominstall, "preseed.cfg")
     copy_file(source, target)
Code Example #42
 def setUp(self):
     copy_file(
         THIS_TEST_DIR + "/Biblioteca.gbs",
         TEST_DIR + "/examples/Biblioteca.gbs"
         )
Code Example #43
File: build_fs_tree.py Project: LEAMgroup/iw.fss
def build_fs_tree(src_path, dst_path, lib_path):
    """Build FS tree"""

# Rdf imports
    sys.path.append(lib_path)
    from rdf import RDFReader
    from utils import copy_file

    print "Build filesystem data in %s from %s" % (src_path, dst_path)
    sys_encoding = sys.getfilesystemencoding()

    # Store rdf files
    # List of dictionaries {'field': ..., 'uid': ...}
    rdf_files = []

    # Walk into filesystem
    for root, dirs, files in os.walk(src_path):
        if root == src_path:
            # Loop on files
            for item in files:
                match = SEARCH_RDF_RE.match(item)
                if match is None:
                    continue

                # Get field name and content uid
                uid = match.group('uid')
                field = match.group('field')
                rdf_files.append({'uid': uid, 'field': field})

    # Processing collected rdf files
    print "Processing %s rdf files" % str(len(rdf_files))
    file_paths = []
    for rdf_file in rdf_files:
        uid = rdf_file['uid']
        field = rdf_file['field']

        # Get RDF file
        rdf_filename = '%s_%s.rdf' % (uid, field)
        rdf_path = os.path.join(src_path, rdf_filename)
        rdf_file = StringIO()
        rdf_text = ''
        try:
            copy_file(rdf_path, rdf_file)
            rdf_file.seek(0)
            rdf_text = rdf_file.getvalue()
        finally:
            rdf_file.close()

        # Read RDF properties
        try:
            rdf_reader = RDFReader(rdf_text)
        except:
            try:
                # XXX known bug to fix
                rdf_text = rdf_text.replace('&', '&amp;')
                rdf_reader = RDFReader(rdf_text)
            except:
                print rdf_path
                print rdf_text
                raise
        field_url = rdf_reader.getFieldUrl()
        field_url = field_url.encode(sys_encoding, 'replace')

        # Create tree directories
        content_path_array = field_url.split('/')[:-2]
        content_path = dst_path
        for content_dir in content_path_array:
            content_path = os.path.join(content_path, content_dir)
            if os.path.exists(content_path):
                continue
            print "Create path: %s" % content_path
            os.mkdir(content_path)

        # Get source file
        src_filename = '%s_%s' % (uid, field)
        src_file_path = os.path.join(src_path, src_filename)

        if not os.path.exists(src_file_path):
            print "Source file doesn't exist, we continue: %s" % src_file_path
            continue

        # Get destination file
        dst_filename = field
        dst_filenames = rdf_reader.getFieldProperty('fss:filename')
        if dst_filenames:
            dst_filename = dst_filenames[0]
            if not dst_filename:
                dst_filename = field
            else:
                dst_filename = dst_filename.encode(sys_encoding, 'replace')
        dst_file_path = os.path.join(content_path, dst_filename)

        # In some cases, you can have a content having 2 fss fields with
        # 2 files with the same name
        orig_dst_filename = dst_filename
        dst_file_path_ok = False
        index = 0
        while not dst_file_path_ok:
            if dst_file_path not in file_paths:
                dst_file_path_ok = True
                file_paths.append(dst_file_path)
            else:
                index += 1
                dst_filename = '%s-%s' % (str(index), orig_dst_filename)
                print dst_filename
                dst_file_path = os.path.join(content_path, dst_filename)

        print "Create file: %s" % dst_file_path
        copy_file(src_file_path, dst_file_path)
        print "Create RDF file: %s.rdf" % dst_file_path
        copy_file(rdf_path, dst_file_path + '.rdf')

    print "Filesystem data built complete"