Example #1
    def up(self):
        remote_dir = self.remote_dir
        if not os.path.exists(remote_dir):
            os.mkdir(remote_dir)

        tries = 5
        while tries > 0:
            self.mount(remote_dir)
            sleep(1)

            try:
                sh.ls(remote_dir)
            except sh.ErrorReturnCode:
                pstor.umount(remote_dir)
            else:
                break

            tries -= 1
        else:
            raise exceptions.PstorException("Can't ls in mounted webdav directory")

        remote_dir = os.path.join(self.remote_dir, 'pstor/')

        if not os.path.exists(remote_dir):
            os.mkdir(remote_dir)
        # TODO: check that existing files are not overwritten (e.g. an existing repo inside a new one)
        sh.rsync(remote_dir, '.pstor/encrypted', recursive=True)
Example #2
    def add_all_files(self):
        SUFFIXES = {'_dat.zip',
                    '_metadata.xml',
                    'README.md',
                    'CONTRIBUTING.md',
                    'LICENSE.md',
                    '.adoc'
                    }
        with CdContext(self.directory):
            sh.git.init('.')

            logging.debug("Files to add: " + str(sh.ls()))

            # NOTE: repo.untracked_files is unreliable with CdContext
            # using sh.ls() instead; note that it does not honor .gitignore
            for _file in sh.ls('-1'):
                # TODO: This attempts to add existing files a second time
                _file = _file.rstrip('\n')
                for suffix in SUFFIXES:
                    if _file.endswith(suffix):
                        logging.info("Adding file: " + _file)
                        self.add_file(_file)
                        break
                else:
                    logging.debug('Skipping ' + _file)
Example #3
 def mount_volume(self):
     logger.debug('Mounting {} to {} on droplet {}'.format(
         self.device_name, self.mount_point, droplet.id))
     try:
         logger.debug('Waiting a bit for the disk to be attached')
         sleep(5)
         ls('-l', self.device_name)
     except ErrorReturnCode:
         pass
     if self.new_volume:
         logger.debug(
             '{} is brand new volume, creating filesystem on it'.format(
                 self.name))
         if not self.create_filesystem():
             return False
     try:
         mkdir('-p', self.mount_point)
     except ErrorReturnCode as err:
         logger.error('Failed creating mount point {}'.format(
             self.mount_point))
         logger.debug(err)
         return False
     try:
         mount(self.device_name, self.mount_point)
     except ErrorReturnCode_32 as err:
         logger.error(
             "Can't mount {}: looks like it has no valid filesystem, or it may be partitioned"
             .format(self.device_name))
         logger.debug(err)
         return False
     except ErrorReturnCode as err:
         logger.error('Mount failed: {}'.format(err))
         return False
     return True
Example #4
 def prebuild_arch(self, arch):
     if not self.is_patched(arch):
         super(ReportLabRecipe, self).prebuild_arch(arch)
         self.apply_patch('patches/fix-setup.patch', arch.arch)
         recipe_dir = self.get_build_dir(arch.arch)
         shprint(sh.touch, os.path.join(recipe_dir, '.patched'))
         ft = self.get_recipe('freetype', self.ctx)
         ft_dir = ft.get_build_dir(arch.arch)
         ft_lib_dir = os.environ.get('_FT_LIB_', os.path.join(ft_dir, 'objs', '.libs'))
         ft_inc_dir = os.environ.get('_FT_INC_', os.path.join(ft_dir, 'include'))
         tmp_dir = os.path.normpath(os.path.join(recipe_dir, "..", "..", "tmp"))
         info('reportlab recipe: recipe_dir={}'.format(recipe_dir))
         info('reportlab recipe: tmp_dir={}'.format(tmp_dir))
         info('reportlab recipe: ft_dir={}'.format(ft_dir))
         info('reportlab recipe: ft_lib_dir={}'.format(ft_lib_dir))
         info('reportlab recipe: ft_inc_dir={}'.format(ft_inc_dir))
         with current_directory(recipe_dir):
             sh.ls('-lathr')
             ensure_dir(tmp_dir)
             pfbfile = os.path.join(tmp_dir, "pfbfer-20070710.zip")
             if not os.path.isfile(pfbfile):
                 sh.wget("http://www.reportlab.com/ftp/pfbfer-20070710.zip", "-O", pfbfile)
             sh.unzip("-u", "-d", os.path.join(recipe_dir, "src", "reportlab", "fonts"), pfbfile)
             if os.path.isfile("setup.py"):
                 # Use text mode so str.replace also works on Python 3 (reading 'rb' yields bytes)
                 with open('setup.py', 'r') as f:
                     text = f.read().replace('_FT_LIB_', ft_lib_dir).replace('_FT_INC_', ft_inc_dir)
                 with open('setup.py', 'w') as f:
                     f.write(text)
Example #5
def mount_casper():
    """Create temporary casper mount point and mount casper read/write file."""
    if not os.path.exists(mem.casper_rw_src):
        print("[!] Cannot find casper-rw source {0}".format(mem.casper_rw_src))
        cleanexit(1)
    if not os.path.exists(mem.casper_rw_mp):
        if mem.options.verbose:
            print("[+] Creating mount point \"{0}\"".format(mem.casper_rw_mp))
        sudo.mkdir(mem.casper_rw_mp)

    if mem.options.verbose:
        print("[+] Mounting {0} to {1}".format(mem.casper_rw_src,
                                               mem.casper_rw_mp))
    try:
        sudo.mount("-o",
                   "loop,rw",
                   "-t",
                   "ext2",
                   mem.casper_rw_src,
                   mem.casper_rw_mp,
                   _out=sys.stdout,
                   _err=sys.stderr)
    except Exception as exception_:
        print(u"[!] Exception: {0}".format(exception_))
        cleanexit(1)
    if mem.options.debug:
        ls("-lat", mem.casper_rw_mp, _out=sys.stdout)
Example #6
def has_vars_file(module_dir):
    vars_file = "{}/vars.tf".format(module_dir)
    try:
        sh.ls(vars_file)
        return vars_file
    except sh.ErrorReturnCode as e:
        print("{} does not exist!".format(vars_file))
        raise e
Example #7
def test_glob_expansion():
    import sh

    # This will not work: sh runs commands without a shell,
    # so the literal string '*.py' is passed to ls unexpanded.
    sh.ls('*.py')

    # Expand the pattern explicitly instead.
    sh.ls(sh.glob('*.py'))
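Because sh executes programs directly rather than through a shell, wildcard patterns reach the command as literal strings. A minimal sketch of the same idea using only the standard glob module (equivalent in spirit to the sh.glob call above):

import glob
import sh

# Expand the pattern in Python, then pass the matching names to ls as separate arguments.
py_files = glob.glob('*.py')
if py_files:
    print(sh.ls('-l', *py_files))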
Example #8
def get_module_file(module_dir):
    module_file = "{}/modules.tf".format(module_dir)
    try:
        sh.ls(module_file)
        return module_file
    except sh.ErrorReturnCode as e:
        print("{} does not exist!".format(module_file))
        raise e
Example #9
def test_command_execution():
    import sh
    print(sh.ls('/'))

    from sh import ls
    print(ls('/'))

    run = sh.Command('/home/echo.sh')
    run()
Example #10
    def test_ok_code(self):
        from sh import ls, ErrorReturnCode_1, ErrorReturnCode_2

        exc_to_test = ErrorReturnCode_2
        code_to_pass = 2
        if IS_OSX:
            exc_to_test = ErrorReturnCode_1
            code_to_pass = 1
        self.assertRaises(exc_to_test, ls, "/aofwje/garogjao4a/eoan3on")

        ls("/aofwje/garogjao4a/eoan3on", _ok_code=code_to_pass)
        ls("/aofwje/garogjao4a/eoan3on", _ok_code=[code_to_pass])
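A side note on _ok_code, which the test above exercises: it tells sh to treat the listed exit statuses as success instead of raising ErrorReturnCode. A minimal sketch using grep, which conventionally exits with 1 when nothing matches (the file path here is only an illustration):

import sh

# grep exits with 1 when no line matches; treat that as a normal outcome.
result = sh.grep('needle', '/etc/hostname', _ok_code=[0, 1])
print(result.exit_code)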
Example #12
 def test_no_pipe(self):
     from sh import ls
     
     p = ls()
     self.assertFalse(p.process._pipe_queue.empty())
     
     def callback(line): pass
     p = ls(_out=callback)
     self.assertTrue(p.process._pipe_queue.empty())
     
     p = ls(_no_pipe=True)
     self.assertTrue(p.process._pipe_queue.empty())
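As the test above suggests, passing a callable as _out makes sh hand the output to that callback instead of buffering it internally. A minimal sketch of consuming output line by line (the handler name is illustrative):

import sh

def handle_line(line):
    # Called by sh for each line that ls writes to stdout.
    print('got:', line.rstrip('\n'))

sh.ls('-1', '/etc', _out=handle_line)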
Example #14
File: test.py Project: ahhentz/sh
    def test_glob_warning(self):
        from sh import ls
        from glob import glob
        import warnings

        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")

            ls(glob("ofjaoweijfaowe"))

            self.assertTrue(len(w) == 1)
            self.assertTrue(issubclass(w[-1].category, UserWarning))
            self.assertTrue("glob" in str(w[-1].message))
Example #15
    def add_all_files(self):
        with CdContext(self.book.local_path):
            sh.git.init('.')

            logging.debug("files to add: " + str(sh.ls()))

            # NOTE: repo.untracked_files is unreliable with CdContext
            # using sh.ls() instead; note that it does not honor .gitignore
            for _file in sh.ls():
                for _subpath in _file.split():
                    logging.debug("adding file: " + str(_subpath))

                    self.add_file(_subpath)
Example #16
def test_exit_codes():
    from sh import ls, ErrorReturnCode_2, ErrorReturnCode
    output = ls('/')
    print(output.exit_code)  # should be 0

    # error return code
    try:
        print(ls('/some/non-existent'))
    except ErrorReturnCode_2:
        print('folder does not exist!')
    except ErrorReturnCode:
        print('unknown error')
        exit(1)
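The numbered exceptions such as ErrorReturnCode_2 are subclasses of ErrorReturnCode, so catching the base class covers any non-zero exit. A minimal sketch of inspecting the details sh attaches to the exception:

import sh

try:
    sh.ls('/definitely/not/there')
except sh.ErrorReturnCode as e:
    # The exception carries the command that ran and its captured output.
    print(e.full_cmd)
    print(e.stderr.decode())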
Example #18
    def test_generation(self):
        """Test standard app generation"""
        cookiecutter('../', no_input=True, overwrite_if_exists=True)

        file_list = ((sh.ls('polls'), (
            'apps.py',
            '__init__.py',
            'models.py',
        )), (sh.ls('polls/tests'), ('__init__.py', 'test_models.py')))

        for ls in file_list:
            for file in ls[1]:
                self.assertIn(file, ls[0])
Example #20
    def search_yml(self, request, pk=None):
        yml_dir_all = []
        yml_set = []
        yml_dir_list = request.DATA['pro_ansi_release_yml'].split('/')
        print(yml_dir_list)
        yml_dir_list.pop()
        yml_dir = '/'.join(yml_dir_list) + '/costume'
        yml_dir_all.append([yml_dir, request.DATA['pro_name']])
        print(yml_dir_all)
        for i,x in yml_dir_all:
            if os.path.exists(i):
                try:
                    files = sh.ls(i).split()
                except:
                    pass
                else:
                    for j in files:
                        print(j)
                        with open(i + '/' + j,'r') as f:
                            try:
                                explain = f.readlines()[0].strip().split(':')[1]
                            except:
                                explain = 'no explanation'
                            yml_set.append({'pro_name': x,
                                            'yml_name': j,
                                            'yml_full_distination': i + '/' + j,
                                            'yml_explain': explain})
        return Response(yml_set)
Example #21
    def test_bake_args_come_first(self):
        from sh import ls
        ls = ls.bake(full_time=True)

        ran = ls("-la").command_ran
        ft = ran.index("full-time")
        self.assertTrue("-la" in ran[ft:])
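bake() returns a new command object with arguments pre-applied, which is what the test above relies on when it checks that baked arguments come before call-time arguments. A minimal sketch:

import sh

# Pre-apply -la; every call through ls_la now includes it before any call-time arguments.
ls_la = sh.ls.bake('-la')
print(ls_la('/tmp'))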
Example #22
 def test_incremental_composition(self):
     from sh import ls, wc
     c1 = int(wc(ls("-A1", _piped=True), l=True).strip())
     c2 = len(os.listdir("."))
     if c1 != c2:
         with open("/tmp/fail", "a") as h: h.write("F**K\n")
     self.assertEqual(c1, c2)
Example #23
def cover_calc(in_dir, infile):
    all_query_files = sh.ls(in_dir).split()
    all_query_files = [in_dir + "/" + i for i in all_query_files]

    query_dict = collections.defaultdict(int)
    gentime_query_dict = collections.defaultdict(int)

    for fname in all_query_files:
        print("processing", fname)
        with open(fname) as f:
            for line in f:
                try:
                    (query, freq) = line.strip().split('\t')
                    query_dict[query] += int(freq)
                except ValueError:
                    pass
    with open(infile) as f:
        for line in f:
            query = line.strip().split('\t')[0]
            gentime_query_dict[query] = query_dict[query]

    total = 0
    gen_freq = 0

    for k in query_dict:
        total += query_dict[k]
    for k in gentime_query_dict:
        gen_freq += gentime_query_dict[k]

    print(gen_freq, total, float(gen_freq) / total)
Example #24
def test_code_qa(cookies, context):
    """Generated project should pass flake8 and py.test."""
    context['create_virtualenv'] = 'Yes'
    result = cookies.bake(extra_context=context)
    base_path = str(result.project)
    # Run Flake 8
    try:
        # sh does not do shell-style word splitting, so pass each path as its own argument
        sh.flake8('{path}/setup.py'.format(path=base_path),
                  '{path}/{namespace}'.format(path=base_path,
                                              namespace=context['namespace']))
    except sh.ErrorReturnCode as e:
        pytest.fail(str(e))
    # Run tests
    try:
        ls = sh.ls('{path}/env/bin/'.format(path=base_path))
        cmd = 'pytest'
        if 'pytest' in str(ls.stdout):
            cmd = './env/bin/pytest'
        proc = subprocess.Popen(
            [cmd],
            shell=sys.platform.startswith('win'),
            cwd=base_path
        )
        proc.wait()
    except Exception as e:
        print(ls.stdout)
        pytest.fail(str(e))
Example #25
    def prepare_data(self):  # data preprocessing and conversion
        has_cache_files = False
        
        try:
            cache_dir = sh.ls(FLAGS.cache_dir)
            if 'train.set' in cache_dir and 'valid.set' in cache_dir:
                has_cache_files = True
        except Exception as e:
            logging.error(e)
            sh.mkdir(FLAGS.cache_dir)

        if not has_cache_files:
            X, Y = [], []

            for entity_example in self.entity_examples:
                item_x, item_y = [], []
                for item in entity_example:
                    item_x.append(item[0])
                    item_y.append(self.entity2id[item[1]])
                X.append(item_x)
                Y.append(item_y)

            train_x, valid_x, train_y, valid_y = train_test_split(X, Y, test_size=FLAGS.test_size, random_state=0)

            train_input_ids = self._convert_text_to_ids(self.tokenizer, train_x)
            valid_input_ids = self._convert_text_to_ids(self.tokenizer, valid_x)

            train_input_ids, train_labels, train_attention_masks = self._convert_example_to_features(self.tokenizer, train_input_ids, train_y, FLAGS.pad_token_label_id)
            valid_input_ids, valid_labels, valid_attention_masks = self._convert_example_to_features(self.tokenizer, valid_input_ids, valid_y, FLAGS.pad_token_label_id)

            nlu_train_set = TensorDataset(train_input_ids, train_attention_masks, train_labels)
            nlu_valid_set = TensorDataset(valid_input_ids, valid_attention_masks, valid_labels)

            torch.save(nlu_train_set, os.path.join(FLAGS.cache_dir, 'train.set'))
            torch.save(nlu_valid_set, os.path.join(FLAGS.cache_dir, 'valid.set'))
Example #26
def getfiles(userpath):
	filepath=[]
	userpath = os.path.abspath(userpath)
	contents=os.walk(userpath)
	temp = contents
	temp_list=list(temp)
	if len(temp_list)==0:	#This means that either the path points to a single file or a non-existent file/folder.
		try:
			with open(userpath) as f:	
				pass	
			return userpath.split()	#Applied split function to convert the string to a list.
		except IOError:
			print('Invalid path.')
			sys.exit()

	contents=os.walk(userpath)
	raw_files = next(contents)
	files = sh.ls(str(raw_files[0]), '-R')
	files = str(files).split()
	ff = []
	for i in range(len(files)):
		if files[i][-1] == ':':
			folder = files[i][:-1]
			continue
		try:
			sh.cd(folder + '/' + files[i])
			continue
		except OSError:
			ff.append(folder + '/' + files[i])
	return ff
Example #27
def trial(num_bins=1, size_bin=500, after_rm=None, max_delta=0.05):
    from sh import imgbase, rm, ls

    def img_free():
        return float(imgbase("layout", "--free-space"))

    imgbase = imgbase.bake("--debug")

    a = img_free()

    [dd(B, size_bin) for B in iter(range(0, num_bins))]
    print("Files which were created")
    print(ls("-shal", *glob.glob("/var/tmp/*.bin")))
    b = img_free()

    print("Files are getting removed")
    rm("-f", *glob.glob("/var/tmp/*.bin"))
    after_rm()
    c = img_free()

    ratio = a / c
    print(a, b, c, ratio)
    delta = 1 - ratio
    assert delta < max_delta, \
        "Delta %s is larger than %s" % (delta, max_delta)
Example #28
def test_generation(cookies, context):
    """Generated project should replace all variables."""
    result = cookies.bake(extra_context=context)
    assert result.exception is None
    assert result.exit_code == 0
    assert result.project.basename == context['repo_name']
    assert result.project.isdir()

    paths = build_files_list(str(result.project))
    assert paths
    check_paths(paths)
    base_path = str(result.project)
    # Run Flake 8
    try:
        # bake() only pre-sets arguments without running the command, and sh does not
        # split argument strings, so call flake8 directly with each path as its own argument
        sh.flake8('{path}/setup.py'.format(path=base_path),
                  '{path}/{namespace}'.format(path=base_path,
                                              namespace=context['namespace']))
    except sh.ErrorReturnCode as e:
        pytest.fail(str(e))
    # Run tests
    try:
        ls = sh.ls('{path}/env/bin/'.format(path=base_path))
        cmd = 'pytest'
        if 'pytest' in str(ls.stdout):
            cmd = './env/bin/pytest'
        proc = subprocess.Popen(
            [cmd],
            shell=sys.platform.startswith('win'),
            cwd=base_path
        )
        proc.wait()
    except Exception as e:
        print(ls.stdout)
        pytest.fail(str(e))
Example #29
def split_ann(ann_file):
    if 'tmp' not in ls():
        mkdir('tmp')
    parser = BeautifulSoup(open(ann_file))
    for mistake in parser.find_all('mistake'):
        with open('tmp/%s' % mistake.attrs['nid'], 'a') as f:
            f.write(mistake.__str__())
Example #31
def test_link_package_repos(product):
    """Test links made from doc repo to package repos."""
    print(sh.ls('-al', product.doc_dir))

    for package_name, package_data in product.manifest.packages.items():
        print(package_name)
        # test that the package's directory exists in docs
        package_dir = os.path.join(product.build_dir,
                                   product.manifest.doc_repo_name,
                                   str(package_name))
        print(package_dir)
        assert os.path.isdir(package_dir)

        # test that the packages doc/_static directory is linked
        package_static_dir = os.path.join(package_data['dir'],
                                          'doc', '_static', package_name)
        if os.path.exists(package_static_dir):
            doc_package_static_dir = os.path.join(product.doc_dir,
                                                  '_static', str(package_name))
            assert os.path.islink(doc_package_static_dir)
            assert os.path.isdir(doc_package_static_dir)

        # test that individual entities of a package's doc (aside from _static)
        # are linked
        source_dir = os.path.join(package_data['dir'], 'doc')
        print('source_dir', source_dir)
        print(os.listdir(source_dir))
        target_dir = os.path.join(product.doc_dir, str(package_name))
        for entity in os.listdir(source_dir):
            print('entity', entity)
            if entity in product.package_excludes:
                continue
            link_name = os.path.join(target_dir, entity)
            assert os.path.islink(link_name)
            assert os.path.lexists(link_name)
Example #32
    def test_bake_args_come_first(self):
        from sh import ls
        ls = ls.bake(h=True)

        ran = ls("-la").ran
        ft = ran.index("-h")
        self.assertTrue("-la" in ran[ft:])
Example #34
    def test_background_exception(self):
        from sh import ls, ErrorReturnCode_1, ErrorReturnCode_2
        p = ls("/ofawjeofj", _bg=True)  # should not raise

        exc_to_test = ErrorReturnCode_2
        if IS_OSX: exc_to_test = ErrorReturnCode_1
        self.assertRaises(exc_to_test, p.wait)  # should raise
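With _bg=True the command is started in the background and a failure only surfaces when wait() is called, which is exactly what the test above asserts. A minimal sketch of the non-failing case:

import sh

# Start the command in the background and block on it later.
p = sh.sleep('2', _bg=True)
# ... do other work here ...
p.wait()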
Example #37
    def prepare_data(self):  # data preprocessing and conversion
        has_cache_files = False

        try:
            cache_dir = sh.ls(FLAGS.cache_dir)
            if 'train.set' in cache_dir and 'valid.set' in cache_dir:
                has_cache_files = True
        except Exception as e:
            logging.error(e)
            sh.mkdir(FLAGS.cache_dir)

        if not has_cache_files:
            data_list = self._preprocess_raw_data(self.tokenizer)
            train_list, valid_list = train_test_split(
                data_list, test_size=FLAGS.test_size, random_state=0)

            train_input_ids, train_attention_mask = self._seq_padding(
                self.tokenizer, train_list)
            valid_input_ids, valid_attention_mask = self._seq_padding(
                self.tokenizer, valid_list)

            dialogue_train_set = TensorDataset(train_input_ids,
                                               train_attention_mask)
            dialogue_valid_set = TensorDataset(valid_input_ids,
                                               valid_attention_mask)

            torch.save(dialogue_train_set,
                       os.path.join(FLAGS.cache_dir, 'train.set'))
            torch.save(dialogue_valid_set,
                       os.path.join(FLAGS.cache_dir, 'valid.set'))
Example #39
def read_nlu_data():
    try:
        cache_dir = sh.ls(FLAGS.cache_dir)
        if 'id2class.set' in cache_dir and 'intent_examples.set' in cache_dir:
            id2class_path = os.path.join(FLAGS.cache_dir, 'id2class.set')
            id2class_lock_path = id2class_path + '.lock'
            intent_examples_path = os.path.join(FLAGS.cache_dir,
                                                'intent_examples.set')
            intent_examples_lock_path = intent_examples_path + '.lock'

            with FileLock(id2class_lock_path):
                id2class = torch.load(id2class_path)

            with FileLock(intent_examples_lock_path):
                intent_examples = torch.load(intent_examples_path)

            return id2class, intent_examples
    except Exception as e:
        logging.error(e)
        sh.mkdir(FLAGS.cache_dir)

    data = load_data(FLAGS.data_dir, 'zh')
    id2class = dict(enumerate(data.intents))
    intent_examples = data.intent_examples

    torch.save(id2class, os.path.join(FLAGS.cache_dir, 'id2class.set'))
    torch.save(intent_examples,
               os.path.join(FLAGS.cache_dir, 'intent_examples.set'))

    return id2class, intent_examples
Example #40
    def prepare_data(self):  # data preprocessing and conversion
        has_cache_files = False
        
        try:
            cache_dir = sh.ls(FLAGS.cache_dir)
            if 'train.set' in cache_dir and 'valid.set' in cache_dir:
                has_cache_files = True
        except Exception as e:
            logging.error(e)
            sh.mkdir(FLAGS.cache_dir)

        if not has_cache_files:
            X, Y = [], []

            for msg in self.intent_examples:
                X.append(msg.text)
                Y.append(self.class2id[msg.get('intent')])
            
            X, _ = self._convert_text_to_ids(self.tokenizer, X, FLAGS.max_seq_length)
            train_x, valid_x, train_y, valid_y = train_test_split(X, Y, test_size=0.3, random_state=0)

            train_input_ids, train_attention_mask = self._seq_padding(self.tokenizer, train_x)
            train_labels = torch.tensor(train_y, dtype=torch.long)

            valid_input_ids, valid_attention_mask = self._seq_padding(self.tokenizer, valid_x)
            valid_labels = torch.tensor(valid_y, dtype=torch.long)

            nlu_train_set = TensorDataset(train_input_ids, train_attention_mask, train_labels)
            nlu_valid_set = TensorDataset(valid_input_ids, valid_attention_mask, valid_labels)

            torch.save(nlu_train_set, os.path.join(FLAGS.cache_dir, 'train.set'))
            torch.save(nlu_valid_set, os.path.join(FLAGS.cache_dir, 'valid.set'))
Example #42
def get_file_names(path='.'):
    '''
    Return only the file names in the given directory.
    The equivalent bash pipeline is:
        ls -l | sed -E -e '1d; s/^([^ ]+ +){8}//'
    Demonstrates piping one sh command into another.
    '''
    return sed(ls(path, "-l"), "-E", "-e", '1d; s/^([^ ]+ +){8}//')
Example #43
 def getGitRepositoriesChanges(self,path):
     cd(path)
     print('PATH', path)
     print('LS', ls())
     try:
         git('remote', 'update')
     except Exception:
         raise
Example #44
def test_piping():
    from sh import sort, du, glob, wc, ls

    # sort this directory by biggest file
    print(sort(du(glob('*'), '-sb'), '-rn'))

    # print the number of folders and files in /etc
    print(wc(ls('/etc', '-l'), '-l'))
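When commands are nested like this, the inner command runs to completion before its output is handed to the outer one; passing _piped=True to the inner command streams the data incrementally instead, as in the test_incremental_composition example earlier. A minimal sketch:

import sh

# Stream the listing into wc instead of buffering it all first.
count = int(sh.wc(sh.ls('-1', '/etc', _piped=True), '-l').strip())
print(count)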
Example #45
def get_share_names(rootifs):
    rc_lab_share_info =[]
    for isi in rootifs:
        #rc_lab_dirs.append(sh.ls(isi))
        for directory in sh.ls(isi).split():
            share_dict = {'top': isi,'name':directory}
            rc_lab_share_info.append(share_dict)
    return rc_lab_share_info
Example #46
 def downloaded(self):
     linkName = __file__[:__file__.find(__file__.split('/')[-1])]
     linkName += 'static/youtubedl/'
     fileName = self.name + '.mkv'
     content = ''.join(i for i in sh.ls(linkName))
     return fileName in content
Example #47
    def read_temperature(self):
        try:
            wire_devices = ls("/sys/bus/w1/devices")

        except ErrorReturnCode_2:
            logger.error("Path for wire devices doesn't exist")
            return "Unable to read temperature"
        self.temperature = 12
        return self.temperature
Example #48
def assert_git_notes(hgsha1s):
    gitrepo = os.getcwd()
    sh.cd(".git/refs/notes")
    notes_refs = sh.ls(sh.glob("hg-*")).stdout.splitlines()
    sh.cd(gitrepo)
    sh.git.notes("--ref=hg", "merge", *notes_refs)
    output = sh.git.log(pretty="format:%N", notes="hg").stdout
    notes = [line for line in output.splitlines() if line]
    assert notes == hgsha1s
Example #49
    def build_arch(self, arch=None):
        junk = ['sqlite', 'ssl', 'ffi', 'crypto' ]
        libs_dir = self.ctx.get_libs_dir(arch.arch)
        print (sh.ls('-l','{}'.format(libs_dir)))
        extra_libs = [sh.glob(join('{}', '*' + j + '*').format(libs_dir)) for j in junk]
        if not any(extra_libs):
            info('No junk found.')
        else:
            for libs in extra_libs:
                for lso in libs:
                    warning (lso)

        python_install_dirs = glob.glob(join(self.ctx.python_installs_dir, '*'))
        for python_install in python_install_dirs:
            debug (sh.ls('-l','{}'.format(python_install)))
            exe_files =  sh.glob(join('{}', 'setuptools', '*.exe').format(python_install))
            for f in exe_files:
                print (sh.rm(f))
Example #50
    def _read_nlu_data(self):
        try:
            cache_dir = sh.ls(FLAGS.cache_dir)
            if ('id2entity.set' in cache_dir and 'entity_examples.set' in cache_dir
                    and 'id2class.set' in cache_dir and 'intent_examples.set' in cache_dir):
                id2entity_path = os.path.join(FLAGS.cache_dir, 'id2entity.set')
                id2entity_lock_path = id2entity_path + '.lock'

                entity_examples_path = os.path.join(FLAGS.cache_dir, 'entity_examples.set')
                entity_examples_lock_path = entity_examples_path + '.lock'

                id2class_path = os.path.join(FLAGS.cache_dir, 'id2class.set')
                id2class_lock_path = id2class_path + '.lock'

                intent_examples_path = os.path.join(FLAGS.cache_dir, 'intent_examples.set')
                intent_examples_lock_path = intent_examples_path + '.lock'
                
                with FileLock(id2entity_lock_path):
                    id2entity = torch.load(id2entity_path)

                with FileLock(entity_examples_lock_path):
                    entity_examples = torch.load(entity_examples_path)

                with FileLock(id2class_lock_path):
                    id2class = torch.load(id2class_path)

                with FileLock(intent_examples_lock_path):
                    intent_examples = torch.load(intent_examples_path)
                
                return id2entity, entity_examples, id2class, intent_examples
        except Exception as e:
            logging.error(e)
            sh.mkdir(FLAGS.cache_dir)
        
        data = load_data(FLAGS.data_dir, 'zh')
        entity_lists, entity_examples_cooked, intent_examples = ['O'], [], []

        for item in data.training_examples:
            training_text = item.text
            training_data = item.data

            entity_examples_cooked.append(self._predata(training_text, training_data.get("entities", [])))
            intent_examples.append(training_data.get("intent", None))

        for entity in data.entities:
            for tag in ['B', 'I']:
                entity_lists.append(tag + '-' + entity)

        id2entity = dict(enumerate(entity_lists))
        id2class = dict(enumerate(data.intents))

        torch.save(id2entity, os.path.join(FLAGS.cache_dir, 'id2entity.set'))
        torch.save(entity_examples_cooked, os.path.join(FLAGS.cache_dir, 'entity_examples.set'))

        torch.save(id2class, os.path.join(FLAGS.cache_dir, 'id2class.set'))
        torch.save(intent_examples, os.path.join(FLAGS.cache_dir, 'intent_examples.set'))

        return id2entity, entity_examples_cooked, id2class, intent_examples
Example #51
async def test_snapshot_restore(deploy, event_loop):
    """
    Trigger snapshot and restore actions
    """
    from sh import juju, ls
    controller, model = deploy
    etcd = await model.deploy(str(ETCD_CHARM_PATH))
    await model.deploy('cs:~containers/easyrsa')
    await model.add_relation('easyrsa:client', 'etcd:certificates')

    await etcd.set_config({'channel': '3.2/stable'})
    await asyncify(_juju_wait)(controller, model.info.name)

    for unit in etcd.units:
        leader = await unit.is_leader_from_status()
        if leader:
            # Load dummy data
            await load_data(unit)
            for ver in ['v2', 'v3']:
                assert await is_data_present(unit, ver)
            filenames = {}
            for dataset in ['v2', 'v3']:
                # Take snapshot of data
                action = await unit.run_action('snapshot',
                                               **{'keys-version': dataset})
                action = await action.wait()
                assert action.status == 'completed'
                src = Path(action.results['snapshot']['path'])
                dst = Path(action.results['snapshot']['path']).name
                await unit.scp_from(str(src), str(dst))
                filenames[dataset] = str(dst)
                out = ls('-l', 'result*')
                print(out.stdout.decode().strip())

            await delete_data(unit)
            for ver in ['v2', 'v3']:
                assert await is_data_present(unit, ver) is False

            # Restore v2 data
            # Note: libjuju does not implement attach yet.
            juju('attach', '-m', "{}:{}".format(controller, model.info.name),
                 'etcd', "snapshot='./{}'".format(str(filenames['v2'])))
            action = await unit.run_action('restore')
            action = await action.wait()
            assert action.status == 'completed'
            for ver in ['v2', 'v3']:
                assert await is_data_present(unit, ver) is True

            # Restore v3 data
            juju('attach', '-m', "{}:{}".format(controller, model.info.name),
                 'etcd', "snapshot='./{}'".format(str(filenames['v3'])))

            action = await unit.run_action('restore')
            action = await action.wait()
            assert action.status == 'completed'
            for ver in ['v2', 'v3']:
                assert await is_data_present(unit, ver) is True
Example #52
File: cp.py Project: yunify/qsctl
def step_impl(context):
    output = sh.ls("tmp").stdout.decode("utf-8")
    ok = True
    for row in context.table:
        if row["name"] not in output:
            ok = False
            break
    assert_that(ok).is_equal_to(True)

    sh.rm("-rf", "tmp")
Example #53
File: test.py Project: ahhentz/sh
    def test_command_wrapper(self):
        from sh import Command, which
        
        ls = Command(which("ls"))
        wc = Command(which("wc"))

        c1 = int(wc(ls("-A1"), l=True))
        c2 = len(os.listdir("."))

        self.assertEqual(c1, c2)
Example #54
def boot(auth=True):

    # kill mongo
    kill()

    time.sleep(1)

    # wipe mongo
    wipe()

    time.sleep(1)

    # start mongo without auth
    start(auth=False)

    time.sleep(1)

    if isyes(auth):

        # create users
        admin()

        time.sleep(1)

        # restart with auth
        kill()

        time.sleep(10)

        start(auth=auth)

        time.sleep(1)


    config = cm_config_server().get("cloudmesh.server.mongo")
    path = path_expand(config["path"])
    banner(path)
    print(ls(path))
    banner("PROCESS")
    with settings(warn_only=True):
        local("ps -ax | fgrep mongo")
Example #55
def get_image_paths(sat_dir=SAT_DIR):
    """
    By listing contents of the satellite data dir, get a dict
    of 'yyyy/mm': file_path pairs indicating where to find each year and
    month of data
    """

    # decode stdout bytes and keep only the .tif files
    names = sh.ls(sat_dir).stdout.decode().strip().split()
    names = [n for n in names if n.endswith('.tif')]
    yms = [get_ym_from_fname(n) for n in names]
    keys = ['%04d/%02d' % (y, m) for y, m in yms]
    return {k: os.path.join(sat_dir, n) for k, n in zip(keys, names)}
Example #56
 def IsConfigured(self):
     if self.IsConnected() and self.IsInstalled():
         dev_path = '/dev/bus/usb/%s/%s' % (self.usb_bus_num, self.usb_dev_num)
         ret = sh.ls(dev_path,'-l')
         with sh.sudo:
             ret = sh.chmod('777',dev_path)
             ret = sh.modprobe('-r', 'ds2490')            
         self._configured = True    
     else:
         self._configured = False
     
     return self._configured
Example #57
    def test_out_redirection(self):
        import tempfile
        from sh import ls

        file_obj = tempfile.TemporaryFile()
        out = ls(_out=file_obj)

        file_obj.seek(0)
        actual_out = file_obj.read()
        file_obj.close()

        self.assertTrue(len(actual_out) != 0)