Example #1
    def test_load_from_file_with_relative_paths(self):
        """
        When explicitly setting a config file, paths should be relative to the
        config file, not the working directory.
        """

        config_dir = TemporaryDirectory()
        config_fname = os.path.join(config_dir.name, 'mkdocs.yml')
        docs_dir = os.path.join(config_dir.name, 'src')
        os.mkdir(docs_dir)

        config_file = open(config_fname, 'w')

        try:
            config_file.write("docs_dir: src\nsite_name: MkDocs Test\n")
            config_file.flush()
            config_file.close()

            cfg = base.load_config(config_file=config_file)
            self.assertTrue(isinstance(cfg, base.Config))
            self.assertEqual(cfg['site_name'], 'MkDocs Test')
            self.assertEqual(cfg['docs_dir'], docs_dir)
            self.assertEqual(cfg.config_file_path, config_fname)
            self.assertIsInstance(cfg.config_file_path, utils.text_type)
        finally:
            config_dir.cleanup()
Example #2
class DiskJobResultTests(TestCase):

    def setUp(self):
        self.scratch_dir = TemporaryDirectory()

    def tearDown(self):
        self.scratch_dir.cleanup()

    def test_smoke(self):
        result = DiskJobResult({})
        self.assertEqual(str(result), "None")
        self.assertEqual(repr(result), "<DiskJobResult outcome:None>")
        self.assertIsNone(result.outcome)
        self.assertIsNone(result.comments)
        self.assertEqual(result.io_log, ())
        self.assertIsNone(result.return_code)

    def test_everything(self):
        result = DiskJobResult({
            'outcome': IJobResult.OUTCOME_PASS,
            'comments': "it said blah",
            'io_log_filename': make_io_log([
                (0, 'stdout', b'blah\n')
            ], self.scratch_dir.name),
            'return_code': 0
        })
        self.assertEqual(str(result), "pass")
        self.assertEqual(repr(result), "<DiskJobResult outcome:'pass'>")
        self.assertEqual(result.outcome, IJobResult.OUTCOME_PASS)
        self.assertEqual(result.comments, "it said blah")
        self.assertEqual(result.io_log, ((0, 'stdout', b'blah\n'),))
        self.assertEqual(result.return_code, 0)
Example #3
File: helpers.py  Project: AKSW/QuitStore
class TemporaryRepository(object):
    """A Git repository initialized in a temporary directory as a context manager.

    usage:
    with TemporaryRepository() as tempRepo:
        print("workdir:", tempRepo.workdir)
        print("path:", tempRepo.path)
        index = repo.index
        index.read()
        index.add("...")
        index.write()
        tree = index.write_tree()
        repo.create_commit('HEAD', author, comitter, message, tree, [])
    """

    def __init__(self, is_bare=False, clone_from_repo=None):
        self.temp_dir = TemporaryDirectory()
        if clone_from_repo:
            self.repo = clone_repository(clone_from_repo.path, self.temp_dir.name)
        else:
            self.repo = init_repository(self.temp_dir.name, is_bare)

    def __enter__(self):
        return self.repo

    def __exit__(self, type, value, traceback):
        self.temp_dir.cleanup()
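
A short standalone sketch of how the context manager above might be used with pygit2; the Signature values and the file name are illustrative assumptions, not part of the original example:

import os
from pygit2 import Signature

with TemporaryRepository() as repo:
    # create a file in the repository's working tree and commit it
    with open(os.path.join(repo.workdir, 'README.md'), 'w') as f:
        f.write('hello\n')
    repo.index.add('README.md')
    repo.index.write()
    tree = repo.index.write_tree()
    author = committer = Signature('Example Author', 'author@example.org')
    repo.create_commit('HEAD', author, committer, 'Initial commit', tree, [])
# the temporary directory backing the repository is removed on exit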
Example #4
class TestStatelog(unittest.TestCase):
    def setUp(self):
        self.tmpdir = TemporaryDirectory()
        self.logpath = os.path.join(self.tmpdir.name, 'statelog')
        self.nonexist = os.path.join(self.tmpdir.name, 'nonexist')

        with open(self.logpath, 'wb') as fw:
            fw.write(b'001\n')
            fw.write(b'002\n')

    def tearDown(self):
        self.tmpdir.cleanup()

    def test_load(self):
        state = pop3.statelog_load(self.logpath)
        self.assertEqual(state, {b'001', b'002'})

    def test_load_fallback(self):
        state = pop3.statelog_load(self.nonexist)
        self.assertEqual(state, set())

    def test_create(self):
        pop3.statelog_save(self.logpath, {b'001', b'002'})
        with open(self.logpath, 'rb') as fp:
            self.assertEqual(fp.readline(), b'001\n')
            self.assertEqual(fp.readline(), b'002\n')
Example #5
class NamedFileInTemporaryDirectory(object):

    def __init__(self, filename, mode='w+b', bufsize=-1, **kwds):
        """
        Open a file named `filename` in a temporary directory.

        This context manager is preferred over `NamedTemporaryFile` in
        stdlib `tempfile` when one needs to reopen the file.

        Arguments `mode` and `bufsize` are passed to `open`.
        The remaining arguments are passed to `TemporaryDirectory`.

        """
        self._tmpdir = TemporaryDirectory(**kwds)
        path = _os.path.join(self._tmpdir.name, filename)
        self.file = open(path, mode, bufsize)

    def cleanup(self):
        self.file.close()
        self._tmpdir.cleanup()

    __del__ = cleanup

    def __enter__(self):
        return self.file

    def __exit__(self, type, value, traceback):
        self.cleanup()
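
A brief usage sketch for the class above (assuming the class and its imports are in scope); as the next example's docstring notes, the handle can be closed and the file re-opened by name before the directory disappears:

with NamedFileInTemporaryDirectory('data.bin', mode='wb') as f:
    f.write(b'payload')
    f.close()                      # closing the handle does not delete the file
    with open(f.name, 'rb') as g:  # re-open by name, e.g. to hand the path to another tool
        assert g.read() == b'payload'
# the whole temporary directory is removed when the context exits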
Example #6
class NamedFileInTemporaryDirectory(object):
    """Open a file named `filename` in a temporary directory.
    
    This context manager is preferred over :class:`tempfile.NamedTemporaryFile`
    when one needs to reopen the file, because on Windows only one handle on a
    file can be open at a time. You can close the returned handle explicitly
    inside the context without deleting the file, and the context manager will
    delete the whole directory when it exits.

    Arguments `mode` and `bufsize` are passed to `open`.
    The remaining arguments are passed to `TemporaryDirectory`.
    
    Usage example::
    
        with NamedFileInTemporaryDirectory('myfile', 'wb') as f:
            f.write('stuff')
            f.close()
            # You can now pass f.name to things that will re-open the file
    """
    def __init__(self, filename, mode='w+b', bufsize=-1, **kwds):
        self._tmpdir = TemporaryDirectory(**kwds)
        path = _os.path.join(self._tmpdir.name, filename)
        self.file = open(path, mode, bufsize)

    def cleanup(self):
        self.file.close()
        self._tmpdir.cleanup()

    __del__ = cleanup

    def __enter__(self):
        return self.file

    def __exit__(self, type, value, traceback):
        self.cleanup()
Example #7
def test_dcm2niix_run():
    dicomDir = os.path.join(TEST_DATA_DIR, "sourcedata", "sub-01")
    tmpBase = os.path.join(TEST_DATA_DIR, "tmp")

    #tmpDir = TemporaryDirectory(dir=tmpBase)
    tmpDir = TemporaryDirectory()

    app = Dcm2niix([dicomDir], tmpDir.name)
    app.run()

    helperDir = os.path.join(
            tmpDir.name, DEFAULT.tmpDirName, DEFAULT.helperDir, "*")
    ls = sorted(glob(helperDir))
    firstMtime = [os.stat(_).st_mtime for _ in ls]
    assert 'localizer_20100603125600' in ls[0]

    # files should not change after a rerun
    app.run()
    secondMtime = [os.stat(_).st_mtime for _ in ls]
    assert firstMtime == secondMtime

    # files should change after a forced rerun
    app.run(force=True)
    thirdMtime = [os.stat(_).st_mtime for _ in ls]
    assert firstMtime != thirdMtime

    tmpDir.cleanup()
Example #8
class GitRepositoryTest(TestCase):
    def setUp(self):
        self.tmpdir = TemporaryDirectory()
        self.repo1 = GitRepository(
            self.tmpdir.name,
            url="https://github.com/st-tu-dresden/inloop.git",
            branch="master"
        )
        self.repo2 = GitRepository(
            self.tmpdir.name,
            url="https://github.com/st-tu-dresden/inloop-java-repository-example.git",
            branch="master"
        )

    def tearDown(self):
        self.tmpdir.cleanup()

    def test_git_operations(self):
        self.repo1.synchronize()
        self.assertTrue(self.get_path(".git").exists())
        self.assertTrue(self.get_path("manage.py").exists())
        self.assertEqual(b"", self.run_command("git status -s"))

        self.repo2.synchronize()
        self.assertFalse(self.get_path("manage.py").exists())
        self.assertTrue(self.get_path("build.xml").exists())
        self.assertEqual(b"", self.run_command("git status -s"))

    def get_path(self, name):
        return Path(self.tmpdir.name).joinpath(name)

    def run_command(self, command):
        return check_output(command.split(), cwd=self.tmpdir.name)
Example #9
    def testInitNotExistingsRepo(self):
        dir = TemporaryDirectory()

        repo = quit.git.Repository(dir.name, create=True)
        self.assertFalse(repo.is_bare)
        self.assertEqual(len(repo.revisions()), 0)

        dir.cleanup()
Example #10
def save_document(self):
    tempdirectory = TemporaryDirectory()
    document = self.generate_document(tempdirectory.name)
    if document:
        with open(document, 'rb') as f:
            self.data_file.save(path.basename(document), File(f))
            self.last_update_of_data_file = datetime.datetime.now()
    tempdirectory.cleanup()
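
For comparison, a sketch of the same pattern with TemporaryDirectory used as a context manager, so the directory is removed even if generate_document raises; the surrounding names (generate_document, data_file, last_update_of_data_file) are taken from the snippet above, and File is assumed to be django.core.files.File:

def save_document(self):
    # the context manager yields the directory path and removes it on exit,
    # including when an exception is raised inside the block
    with TemporaryDirectory() as tempdirectory:
        document = self.generate_document(tempdirectory)
        if document:
            with open(document, 'rb') as f:
                self.data_file.save(path.basename(document), File(f))
                self.last_update_of_data_file = datetime.datetime.now()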
Example #11
def test_dcm2bids():
    tmpBase = os.path.join(TEST_DATA_DIR, "tmp")
    #bidsDir = TemporaryDirectory(dir=tmpBase)
    bidsDir = TemporaryDirectory()

    tmpSubDir = os.path.join(bidsDir.name, DEFAULT.tmpDirName, "sub-01")
    shutil.copytree(
            os.path.join(TEST_DATA_DIR, "sidecars"),
            tmpSubDir)

    app = Dcm2bids(
            [TEST_DATA_DIR], "01",
            os.path.join(TEST_DATA_DIR, "config_test.json"),
            bidsDir.name
            )
    app.run()
    layout = BIDSLayout(bidsDir.name, validate=False)

    assert layout.get_subjects() == ["01"]
    assert layout.get_sessions() == []
    assert layout.get_tasks() == ["rest"]
    assert layout.get_runs() == [1,2,3]

    app = Dcm2bids(
            [TEST_DATA_DIR], "01",
            os.path.join(TEST_DATA_DIR, "config_test.json"),
            bidsDir.name
            )
    app.run()


    fmapFile = os.path.join(
            bidsDir.name, "sub-01", "fmap", "sub-01_echo-492_fmap.json")
    data = load_json(fmapFile)
    fmapMtime = os.stat(fmapFile).st_mtime
    assert data["IntendedFor"] == "dwi/sub-01_dwi.nii.gz"

    data = load_json(os.path.join(
        bidsDir.name, "sub-01", "localizer", "sub-01_run-01_localizer.json"))
    assert data["ProcedureStepDescription"] == "Modify by dcm2bids"

    #rerun
    shutil.rmtree(tmpSubDir)
    shutil.copytree(
            os.path.join(TEST_DATA_DIR, "sidecars"),
            tmpSubDir)

    app = Dcm2bids(
            [TEST_DATA_DIR], "01",
            os.path.join(TEST_DATA_DIR, "config_test.json"),
            bidsDir.name
            )
    app.run()

    fmapMtimeRerun = os.stat(fmapFile).st_mtime
    assert fmapMtime == fmapMtimeRerun

    bidsDir.cleanup()
Example #12
    def testCloneRepo(self):
        REMOTE_NAME = 'origin'
        REMOTE_URL = 'git://github.com/AKSW/QuitStore.example.git'

        dir = TemporaryDirectory()
        repo = quit.git.Repository(dir.name, create=True, origin=REMOTE_URL)
        self.assertTrue(path.exists(path.join(dir.name, 'example.nq')))
        self.assertFalse(repo.is_bare)
        dir.cleanup()
Example #13
class MyTest(TestCase):
    def setUp(self):
        self.test_dir = TemporaryDirectory()

    def tearDown(self):
        self.test_dir.cleanup()

    # Test methods follow
    # 2016.07.08 add
    def test_sample(self):
        print(self.test_dir)
Example #14
    def testCloneNotExistingRepo(self):
        environ["QUIT_SSH_KEY_HOME"] = "./tests/assets/sshkey/"

        REMOTE_URL = 'git@github.com:AKSW/ThereIsNoQuitStoreRepo.git'

        dir = TemporaryDirectory()
        with self.assertRaises(Exception) as context:
            quit.git.Repository(dir.name, create=True, origin=REMOTE_URL)
        dir.cleanup()
Example #15
    def testCloneRepoViaSSH(self):
        environ["QUIT_SSH_KEY_HOME"] = "./tests/assets/sshkey/"

        REMOTE_URL = 'git@github.com:AKSW/QuitStore.example.git'

        dir = TemporaryDirectory()
        repo = quit.git.Repository(dir.name, create=True, origin=REMOTE_URL)
        self.assertTrue(path.exists(path.join(dir.name, 'example.nt')))
        self.assertFalse(repo.is_bare)
        dir.cleanup()
Example #16
class LoadScriptTest(unittest.TestCase):

    def setUp(self):
        self.tempdir = TemporaryDirectory()

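        # self.script_files is presumably provided by subclasses as (filename, contents) pairs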
        for filename, contents in self.script_files:
            with open(os.path.join(self.tempdir.name, filename), "xt") as f:
                f.write(contents)

    def tearDown(self):
        self.tempdir.cleanup()
Example #17
    def testCloneRepoViaSSHNoKeyFiles(self):
        environ["QUIT_SSH_KEY_HOME"] = "./tests/assets/nosshkey/"
        if "SSH_AUTH_SOCK" in environ:
            del environ["SSH_AUTH_SOCK"]

        REMOTE_URL = 'git@github.com:AKSW/QuitStore.example.git'

        dir = TemporaryDirectory()
        with self.assertRaises(Exception) as context:
            quit.git.Repository(dir.name, create=True, origin=REMOTE_URL)
        dir.cleanup()
Example #18
class BaseRecorderTest(unittest.TestCase):
    def setUp(self):
        unittest.TestCase.setUp(self)
        self.original_dir = os.getcwd()
        self.temp_dir = TemporaryDirectory()
        os.chdir(self.temp_dir.name)

    def tearDown(self):
        # leave the temporary directory before removing it; deleting the
        # current working directory fails on some platforms
        os.chdir(self.original_dir)
        self.temp_dir.cleanup()
        unittest.TestCase.tearDown(self)
Example #19
class CLITestCase(unittest.TestCase):
    def setUp(self):
        self.temp_dir = TemporaryDirectory()
        self.output_file_index = 0

    def tearDown(self):
        self.temp_dir.cleanup()

    def exec(self, string):
        args = ['-b', self.temp_dir.name]
        args.extend(shlex.split(string))
        process_args(args)

    def list_output(self, string=''):
        temp_text_file = os.path.join(self.temp_dir.name, 
                'temp{}.txt'.format(self.output_file_index))
        self.output_file_index += 1

        self.exec('list --pipe-to "cat > {}" {}'.format(temp_text_file, string))
        with open(temp_text_file) as f:
            text = f.read()
        return text

    def populate(self, num_entries=20):
        for i in range(num_entries):
            self.exec('new -m "{}"'.format(shlex.quote(random_text())))

    def test_list(self):
        self.populate(20)
        text = self.list_output()
        self.assertTrue(len(text.split('\n')) > 20 * 3)

    def test_edit(self):
        self.exec('new --message "Hello world"')
        original_text = self.list_output()

        self.exec('edit -m "New text"')
        modified_text = self.list_output()

        self.assertTrue(re.search('Hello world', original_text))
        self.assertTrue(re.search('New text', modified_text))
        self.assertEqual(re.sub('Hello world', 'New text', original_text), modified_text)

    def test_search(self):
        self.populate(2)
        search_string = 'stringthatwontgetgeneratedbyaccident'
        self.exec('new -m "hello world\n test text {}inthisentry"'.format(search_string))
        self.populate(2)
        all_entries = self.list_output()
        search_matches = self.list_output(search_string)

        self.assertNotEqual(all_entries, search_matches)
        self.assertTrue(re.search(search_string, all_entries))
        self.assertTrue(re.search(search_string, search_matches))
Example #20
class TestCaseSnapshotPath(TestCaseCfg):
    def setUp(self):
        super(TestCaseSnapshotPath, self).setUp()
        # use a new TemporaryDirectory for snapshotPath to avoid
        # side effects from leftovers
        self.tmpDir = TemporaryDirectory()
        self.cfg.dict['profile1.snapshots.path'] = self.tmpDir.name
        self.snapshotPath = self.cfg.snapshotsFullPath()

    def tearDown(self):
        super(TestCaseSnapshotPath, self).tearDown()
        self.tmpDir.cleanup()
Example #21
class FileSystemBackendTest(unittest.TestCase):

    def setUp(self) -> None:
        self.tmpdir = TemporaryDirectory()
        self.cwd = os.getcwd()
        os.chdir(self.tmpdir.name)
        dirname = 'fsbackendtest'
        os.mkdir(dirname) # replace by temporary directory
        self.backend = FilesystemBackend(dirname)
        self.testdata = 'dshiuasduzchjbfdnbewhsdcuzd'
        self.alternative_testdata = "8u993zhhbn\nb3tadgadg"
        self.identifier = 'some name'

    def tearDown(self) -> None:
        os.chdir(self.cwd)
        self.tmpdir.cleanup()

    def test_put_and_get_normal(self) -> None:
        # first put the data
        self.backend.put(self.identifier, self.testdata)

        # then retrieve it again
        data = self.backend.get(self.identifier)
        self.assertEqual(data, self.testdata)

    def test_put_file_exists_no_overwrite(self) -> None:
        name = 'test_put_file_exists_no_overwrite'
        self.backend.put(name, self.testdata)
        with self.assertRaises(FileExistsError):
            self.backend.put(name, self.alternative_testdata)
        self.assertEqual(self.testdata, self.backend.get(name))

    def test_put_file_exists_overwrite(self) -> None:
        name = 'test_put_file_exists_overwrite'
        self.backend.put(name, self.testdata)
        self.backend.put(name, self.alternative_testdata, overwrite=True)
        self.assertEqual(self.alternative_testdata, self.backend.get(name))

    def test_instantiation_fail(self) -> None:
        with self.assertRaises(NotADirectoryError):
            FilesystemBackend("C\\#~~")

    def test_exists(self) -> None:
        name = 'test_exists'
        self.backend.put(name, self.testdata)
        self.assertTrue(self.backend.exists(name))
        self.assertFalse(self.backend.exists('exists_not'))

    def test_get_not_existing(self) -> None:
        name = 'test_get_not_existing'
        with self.assertRaises(FileNotFoundError):
            self.backend.get(name)
Example #22
def create_archive(archive, excluded):
    """Creates a new zip archive file by excluding files at positions found in
    excluded.
    """
    new_archive_file = NamedTemporaryFile()
    temporary_directory = TemporaryDirectory()
    new_archive = ZipFile(new_archive_file, 'w')
    for index, filename in enumerate(archive.namelist()):
        if index not in excluded:
            # extract the member to scratch space, then store it under its
            # original archive name rather than the full temporary path
            archive.extract(filename, path=temporary_directory.name)
            new_archive.write(os.path.join(temporary_directory.name, filename),
                              arcname=filename)
    new_archive.close()  # finalize the zip (writes the central directory)
    temporary_directory.cleanup()
    return new_archive_file
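
A hedged usage sketch for create_archive above; the input file name and the excluded positions are illustrative:

from zipfile import ZipFile

with ZipFile('input.zip') as source:
    pruned = create_archive(source, excluded={0, 2})
# `pruned` is a NamedTemporaryFile: read it back (e.g. via pruned.name)
# before it is closed, since closing deletes the underlying file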
Example #23
class TestCase(unittest.TestCase):
    def __init__(self, methodName):
        os.environ['LANGUAGE'] = 'en_US.UTF-8'
        self.cfgFile = os.path.abspath(os.path.join(__file__, os.pardir, 'config'))
        logger.APP_NAME = 'BIT_unittest'
        logger.openlog()
        super(TestCase, self).__init__(methodName)

    def setUp(self):
        logger.DEBUG = '-v' in sys.argv
        self.run = False
        self.sharePathObj = TemporaryDirectory()
        self.sharePath = self.sharePathObj.name

    def tearDown(self):
        self.sharePathObj.cleanup()

    def callback(self, func, *args):
        func(*args)
        self.run = True

    def assertExists(self, *path):
        full_path = os.path.join(*path)
        if not os.path.exists(full_path):
            self.fail('File does not exist: {}'.format(full_path))

    def assertNotExists(self, *path):
        full_path = os.path.join(*path)
        if os.path.exists(full_path):
            self.fail('File unexpectedly exists: {}'.format(full_path))

    def assertIsFile(self, *path):
        full_path = os.path.join(*path)
        if not os.path.isfile(full_path):
            self.fail('Not a File: {}'.format(full_path))

    def assertIsNoFile(self, *path):
        full_path = os.path.join(*path)
        if os.path.isfile(full_path):
            self.fail('Unexpected File: {}'.format(full_path))

    def assertIsDir(self, *path):
        full_path = os.path.join(*path)
        if not os.path.isdir(full_path):
            self.fail('Not a directory: {}'.format(full_path))

    def assertIsLink(self, *path):
        full_path = os.path.join(*path)
        if not os.path.islink(full_path):
            self.fail('Not a symlink: {}'.format(full_path))
Example #24
File: itk.py  Project: ZhifangYe/fmriprep
    def _run_interface(self, runtime):
        # Get all inputs from the ApplyTransforms object
        ifargs = self.inputs.get()

        # Extract number of input images and transforms
        in_files = ifargs.pop('input_image')
        num_files = len(in_files)
        transforms = ifargs.pop('transforms')
        # Get number of parallel jobs
        num_threads = ifargs.pop('num_threads')
        save_cmd = ifargs.pop('save_cmd')

        # Remove certain keys
        for key in ['environ', 'ignore_exception',
                    'terminal_output', 'output_image']:
            ifargs.pop(key, None)

        # Get a temp folder ready
        tmp_folder = TemporaryDirectory(prefix='tmp-', dir=runtime.cwd)

        xfms_list = _arrange_xfms(transforms, num_files, tmp_folder)
        assert len(xfms_list) == num_files

        # Inputs are ready to run in parallel
        if num_threads < 1:
            num_threads = None

        if num_threads == 1:
            out_files = [_applytfms((
                in_file, in_xfm, ifargs, i, runtime.cwd))
                for i, (in_file, in_xfm) in enumerate(zip(in_files, xfms_list))
            ]
        else:
            from concurrent.futures import ThreadPoolExecutor
            with ThreadPoolExecutor(max_workers=num_threads) as pool:
                out_files = list(pool.map(_applytfms, [
                    (in_file, in_xfm, ifargs, i, runtime.cwd)
                    for i, (in_file, in_xfm) in enumerate(zip(in_files, xfms_list))]
                ))
        tmp_folder.cleanup()

        # Collect output file names, after sorting by index
        self._results['out_files'] = [el[0] for el in out_files]

        if save_cmd:
            self._results['log_cmdline'] = os.path.join(runtime.cwd, 'command.txt')
            with open(self._results['log_cmdline'], 'w') as cmdfile:
                print('\n-------\n'.join([el[1] for el in out_files]),
                      file=cmdfile)
        return runtime
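
Stripped of the fmriprep specifics, the core pattern above (a scratch TemporaryDirectory plus ThreadPoolExecutor.map over per-file work) can be sketched roughly as follows; _work and the inputs are stand-ins, not part of the original interface:

import os
from concurrent.futures import ThreadPoolExecutor
from tempfile import TemporaryDirectory

def _work(args):
    in_file, index, scratch = args
    # stand-in for per-file work that needs scratch space on disk
    tmp_path = os.path.join(scratch, 'chunk_%04d.txt' % index)
    with open(tmp_path, 'w') as fobj:
        fobj.write(in_file)
    return in_file.upper()

in_files = ['a', 'b', 'c']
tmp_folder = TemporaryDirectory(prefix='tmp-')
with ThreadPoolExecutor(max_workers=2) as pool:
    results = list(pool.map(_work, [
        (in_file, i, tmp_folder.name) for i, in_file in enumerate(in_files)]))
tmp_folder.cleanup()  # drop the scratch space once all workers have finished
print(results)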
Example #25
class TestDataSet(unittest.TestCase):

    def setUp(self):
        # Creating a temporary directory
        self.output_dir = TemporaryDirectory(prefix="post_hit_dataset_test_")

    def test_dataset(self):

        self.assertTrue(isinstance(dataset.DataSet(dataset_path = "roadmap_epigenomic.json"), dataset.DataSet))
        self.assertTrue(isinstance(dataset.DataSet(dataset_path = "".join([POST_HIT_PATH,"data/datasets/phyloP100way.json"])), dataset.DataSet))
        self.assertTrue(isinstance(dataset.DataSet(dataset_path ="ensembl"), dataset.DataSet))
        self.assertTrue(isinstance(dataset.DataSet(dataset_path = "gtex.json"), dataset.DataSet))

        self.assertRaises(ValueError, dataset.DataSet,"gtex.txt")
        self.assertRaises(ValueError, dataset.DataSet,"dummy")
        self.assertRaises(ValueError, dataset.DataSet,"dummy.json")

        self.dummy_dataset = dataset.DataSet(dataset_path = POST_HIT_PATH + "tmp/dummy_test.json",
                                    project= "TEST",
                                    description= "Dummy Test JSON FILE",
                                    project_link= "dummy.test",
                                    version= 0,
                                    data_path= "tmp/",
                                    protocole= "ftp",
                                    file_type= "gtf",
                                    ids=["DummyTest"],
                                    download_links=["ftp://ftp.ensembl.org/pub/release-75//gtf/homo_sapiens"],
                                    filenames=["DummyTest.gtf.gz"],
                                    metadata= [],
                                    data_representation={})

        self.assertTrue(isinstance(self.dummy_dataset, dataset.DataSet))
        self.assertTrue(os.path.exists(POST_HIT_PATH + "tmp/DummyTest.gtf.gz"), dataset.DataSet)
        folder = POST_HIT_PATH + 'tmp/'
        for the_file in os.listdir(folder):
            file_path = os.path.join(folder, the_file)
            try:
                if os.path.isfile(file_path):
                    os.unlink(file_path)
                elif os.path.isdir(file_path): 
                    shutil.rmtree(file_path)
            except Exception as e:
                print(e)


    def tearDown(self):
        """Finishes the test."""
        # Deleting the output directory
        self.output_dir.cleanup()
Example #26
class TestRestoreSSH(generic.SSHSnapshotsWithSidTestCase, TestRestoreLocal):
    def setUp(self):
        super(TestRestoreSSH, self).setUp()
        self.include = TemporaryDirectory()
        generic.create_test_files(os.path.join(self.remoteSIDBackupPath, self.include.name[1:]))

        #mount
        self.cfg.setCurrentHashId(mount.Mount(cfg = self.cfg).mount())

    def tearDown(self):
        #unmount
        mount.Mount(cfg = self.cfg).umount(self.cfg.current_hash_id)
        super(TestRestoreSSH, self).tearDown()

        self.include.cleanup()
Example #27
class RestoreTestCase(generic.SnapshotsWithSidTestCase):
    def setUp(self):
        super(RestoreTestCase, self).setUp()
        self.include = TemporaryDirectory()
        generic.create_test_files(self.sid.pathBackup(self.include.name))

    def tearDown(self):
        super(RestoreTestCase, self).tearDown()
        self.include.cleanup()

    def prepairFileInfo(self, restoreFile, mode = 33260):
        d = self.sid.fileInfo
        d[restoreFile.encode('utf-8', 'replace')] = (mode,
                                                     CURRENTUSER.encode('utf-8', 'replace'),
                                                     CURRENTGROUP.encode('utf-8', 'replace'))
        self.sid.fileInfo = d
Example #28
class SnapshotsTestCase(TestCase):
    def setUp(self):
        super(SnapshotsTestCase, self).setUp()
        self.cfgFile = os.path.abspath(os.path.join(__file__,
                                                    os.pardir,
                                                    'config'))
        self.cfg = config.Config(self.cfgFile)
        # use a new TemporaryDirectory for snapshotPath to avoid
        # side effects from leftovers
        self.tmpDir = TemporaryDirectory()
        self.cfg.dict['profile1.snapshots.path'] = self.tmpDir.name
        self.snapshotPath = self.cfg.get_snapshots_full_path()
        os.makedirs(self.snapshotPath)

    def tearDown(self):
        self.tmpDir.cleanup()
Example #29
class MyTest(TestCase):

    def setUp(self):
        self.test_dir = TemporaryDirectory()

    def tearDown(self):
        self.test_dir.cleanup()
    # Test methods follow


# unittest.mock for python3 / mock open source package for python2
# unit test / integration test

# other refs
# nose: nose.readthedocs.org
# pytest: pytest.org 
Example #30
class PythonDeveloperConsoleTestCase(TestCase):
    
    def setUp(self):
        self.tempdir = TemporaryDirectory()
        self.console = PythonDeveloperConsole()
        self.console.logger.setLevel(logging.WARNING)
        sys.path.append(self.tempdir.name)
    
    def tearDown(self):
        sys.path.remove(self.tempdir.name)
        self.tempdir.cleanup()
        
    def eval(self, expression):
        self.assertFalse(self.console.push("result = %s" % expression))
        
        return self.console.locals["result"]
Example #31
class _SVGConverter(_Converter):
    def __call__(self, orig, dest):
        old_inkscape = mpl._get_executable_info("inkscape").version < "1"
        terminator = b"\n>" if old_inkscape else b"> "
        if not hasattr(self, "_tmpdir"):
            self._tmpdir = TemporaryDirectory()
        if (not self._proc  # First run.
                or self._proc.poll() is not None):  # Inkscape terminated.
            env = {
                **os.environ,
                # If one passes e.g. a png file to Inkscape, it will try to
                # query the user for conversion options via a GUI (even with
                # `--without-gui`).  Unsetting `DISPLAY` prevents this (and
                # causes GTK to crash and Inkscape to terminate, but that'll
                # just be reported as a regular exception below).
                "DISPLAY":
                "",
                # Do not load any user options.
                "INKSCAPE_PROFILE_DIR":
                os.devnull,
            }
            # Old versions of Inkscape (e.g. 0.48.3.1) seem to sometimes
            # deadlock when stderr is redirected to a pipe, so we redirect it
            # to a temporary file instead.  This is not necessary anymore as of
            # Inkscape 0.92.1.
            stderr = TemporaryFile()
            self._proc = subprocess.Popen(
                ["inkscape", "--without-gui", "--shell"]
                if old_inkscape else ["inkscape", "--shell"],
                stdin=subprocess.PIPE,
                stdout=subprocess.PIPE,
                stderr=stderr,
                env=env,
                cwd=self._tmpdir.name)
            # Slight abuse, but makes shutdown handling easier.
            self._proc.stderr = stderr
            try:
                self._read_until(terminator)
            except _ConverterError as err:
                raise OSError("Failed to start Inkscape in interactive "
                              "mode") from err

        # Inkscape's shell mode does not support escaping metacharacters in the
        # filename ("\n", and ":;" for inkscape>=1).  Avoid any problems by
        # running from a temporary directory and using fixed filenames.
        inkscape_orig = Path(self._tmpdir.name, os.fsdecode(b"f.svg"))
        inkscape_dest = Path(self._tmpdir.name, os.fsdecode(b"f.png"))
        try:
            inkscape_orig.symlink_to(Path(orig).resolve())
        except OSError:
            shutil.copyfile(orig, inkscape_orig)
        self._proc.stdin.write(
            b"f.svg --export-png=f.png\n" if old_inkscape else
            b"file-open:f.svg;export-filename:f.png;export-do;file-close\n")
        self._proc.stdin.flush()
        try:
            self._read_until(terminator)
        except _ConverterError as err:
            # Inkscape's output is not localized but gtk's is, so the output
            # stream probably has a mixed encoding.  Using the filesystem
            # encoding should at least get the filenames right...
            self._proc.stderr.seek(0)
            raise ImageComparisonFailure(self._proc.stderr.read().decode(
                sys.getfilesystemencoding(), "replace")) from err
        os.remove(inkscape_orig)
        shutil.move(inkscape_dest, dest)

    def __del__(self):
        super().__del__()
        if hasattr(self, "_tmpdir"):
            self._tmpdir.cleanup()
Example #32
class GitRepoTestCase(TestGitPopenMockupMixin):
    def setup_method(self, method):
        self.log.info('GitRepoTestCase.setup_method({})'.format(method))
        # build temporary directory
        self.tempdir = TemporaryDirectory()
        # when initiating service with no repository, the connection is not triggered
        self.service = self.get_service()
        # setup http api mockup
        self.recorder = betamax.Betamax(self.get_requests_session())
        self.get_requests_session().headers['Accept-Encoding'] = 'identity'
        # setup git command mockup
        self.setup_git_popen()
        # when initiating service with no repository, the connection is not triggered
        self.service.repository = self.repository
        # have git commands logged
        Git.GIT_PYTHON_TRACE = True
        FORMAT = '> %(message)s'
        formatter = logging.Formatter(fmt=FORMAT)
        handler = logging.StreamHandler()
        handler.setFormatter(formatter)
        logging.getLogger('git.cmd').removeHandler(logging.NullHandler())
        logging.getLogger('git.cmd').addHandler(handler)
        logging.getLogger('git.cmd').propagate = True
        # have HTTP requests logged
        import http.client
        http.client.HTTPConnection.debuglevel = 1
        logging.getLogger('requests.packages.urllib3').setLevel(logging.DEBUG)
        logging.getLogger('requests.packages.urllib3').propagate = True

    def teardown_method(self, method):
        self.log.info('GitRepoTestCase.teardown_method({})'.format(method))
        self.tempdir.cleanup()

    '''cassette name helper'''

    def _make_cassette_name(self):
        # returns the name of the function calling the function calling this one
        # in other words, when used in a helper function, returns the name of
        # the test function calling the helper function, to make a cassette name.
        test_function_name = sys._getframe(2).f_code.co_name
        if test_function_name.startswith('test'):
            return '_'.join(['test', self.service.name, test_function_name])
        raise Exception("Helper functions shall be used only within test functions!")

    '''assertion helpers'''

    def assert_repository_exists(self, namespace, repository):
        try:
            self.service.get_repository(namespace, repository)
        except Exception as err:
            raise AssertionError("Repository {}/{} not found on {}: {}".format(namespace,
                                                                               repository,
                                                                               self.service.name,
                                                                               err)) from err

    def assert_repository_not_exists(self, namespace, repository):
        try:
            self.service.get_repository(namespace, repository)
        except Exception as err:
            return
        #raise AssertionError("Repository {}/{} exists on {}".format(namespace,
        #                                                                       repository,
        #                                                                       self.service.name,
        #                                                                ))

    def assert_added_remote(self, remote):
        try:
            self.repository.remote(remote)
        except ValueError as err:
            raise AssertionError("Remote {} not in repository".format(remote)) from err

    def assert_added_remote_defaults(self):
        self.assert_added_remote(self.service.name)
        self.assert_added_remote('all')

    def assert_tracking_remote(self, remote_name=None, branch_name='master'):
        if not remote_name:
            remote_name = self.service.name
        for branch in self.repository.branches:
            if branch == branch_name:
                assert remote_name in self.repository.branches[0].tracking_branch().name, \
                    'Could not set "{}" as tracking branch master'.format(self.service.name)

    '''test cases templates'''

    def action_fork(self, local_namespace, remote_namespace, repository):
        # hijack subprocess call
        with self.mockup_git(local_namespace, repository):
            # prepare output for git commands
            remote_slug = self.service.format_path(namespace=remote_namespace, repository=repository, rw=True)
            local_slug = self.service.format_path(namespace=local_namespace, repository=repository, rw=True)
            self.set_mock_popen_commands([
                ('git remote add upstream {}'.format(remote_slug), b'', b'', 0),
                ('git remote add all {}'.format(local_slug), b'', b'', 0),
                ('git remote add {} {}'.format(self.service.name, local_slug), b'', b'', 0),
                ('git version', b'git version 2.8.0', b'', 0),
                ('git pull --progress -v {} master'.format(self.service.name), b'', '\n'.join([
                    'POST git-upload-pack (140 bytes)',
                    'remote: Counting objects: 8318, done.',
                    'remote: Compressing objects: 100% (3/3), done.',
                    'remote: Total 8318 (delta 0), reused 0 (delta 0), pack-reused 8315',
                    'Receiving objects: 100% (8318/8318), 3.59 MiB | 974.00 KiB/s, done.',
                    'Resolving deltas: 100% (5126/5126), done.',
                    'From {}:{}/{}'.format(self.service.fqdn, local_namespace, repository),
                    ' * branch            master     -> FETCH_HEAD',
                    ' * [new branch]      master     -> {}/master'.format(self.service.name)]).encode('utf-8'),
                0)
            ])
            with self.recorder.use_cassette(self._make_cassette_name()):
                self.service.connect()
                self.service.fork(remote_namespace, repository)
                # emulate the outcome of the git actions
                self.service.repository.create_remote('upstream', url=remote_slug)
                self.service.repository.create_remote('all', url=local_slug)
                self.service.repository.create_remote(self.service.name, url=local_slug)

    def action_fork__no_clone(self, local_namespace, remote_namespace, repository):
        # hijack subprocess call
        with self.mockup_git(local_namespace, repository):
            # prepare output for git commands
            remote_slug = self.service.format_path(namespace=remote_namespace, repository=repository, rw=True)
            local_slug = self.service.format_path(namespace=local_namespace, repository=repository, rw=True)
            self.set_mock_popen_commands([
                ('git remote add upstream {}'.format(remote_slug), b'', b'', 0),
                ('git remote add all {}'.format(local_slug), b'', b'', 0),
                ('git remote add {} {}'.format(self.service.name, local_slug), b'', b'', 0),
                ('git version', b'git version 2.8.0', b'', 0),
                ('git pull --progress -v {} master'.format(self.service.name), b'', '\n'.join([
                    'POST git-upload-pack (140 bytes)',
                    'remote: Counting objects: 8318, done.',
                    'remote: Compressing objects: 100% (3/3), done.',
                    'remote: Total 8318 (delta 0), reused 0 (delta 0), pack-reused 8315',
                    'Receiving objects: 100% (8318/8318), 3.59 MiB | 974.00 KiB/s, done.',
                    'Resolving deltas: 100% (5126/5126), done.',
                    'From {}:{}/{}'.format(self.service.fqdn, local_namespace, repository),
                    ' * branch            master     -> FETCH_HEAD',
                    ' * [new branch]      master     -> {}/master'.format(self.service.name)]).encode('utf-8'),
                0)
            ])
            with self.recorder.use_cassette(self._make_cassette_name()):
                self.service.connect()
                self.service.fork(remote_namespace, repository)
                # emulate the outcome of the git actions
                self.service.repository.create_remote('upstream', url=remote_slug)
                self.service.repository.create_remote('all', url=local_slug)
                self.service.repository.create_remote(self.service.name, url=local_slug)

    def action_clone(self, namespace, repository):
        # hijack subprocess call
        with self.mockup_git(namespace, repository):
            local_slug = self.service.format_path(namespace=namespace, repository=repository, rw=True)
            self.set_mock_popen_commands([
                ('git remote add all {}'.format(local_slug), b'', b'', 0),
                ('git remote add {} {}'.format(self.service.name, local_slug), b'', b'', 0),
                ('git version', b'git version 2.8.0', b'', 0),
                ('git pull --progress -v {} master'.format(self.service.name), b'', '\n'.join([
                    'POST git-upload-pack (140 bytes)',
                    'remote: Counting objects: 8318, done.',
                    'remote: Compressing objects: 100% (3/3), done.',
                    'remote: Total 8318 (delta 0), reused 0 (delta 0), pack-reused 8315',
                    'Receiving objects: 100% (8318/8318), 3.59 MiB | 974.00 KiB/s, done.',
                    'Resolving deltas: 100% (5126/5126), done.',
                    'From {}:{}/{}'.format(self.service.fqdn, namespace, repository),
                    ' * branch            master     -> FETCH_HEAD',
                    ' * [new branch]      master     -> {}/master'.format(self.service.name)]).encode('utf-8'),
                0)
            ])
            with self.recorder.use_cassette(self._make_cassette_name()):
                self.service.connect()
                self.service.clone(namespace, repository)
                self.service.repository.create_remote('all', url=local_slug)
                self.service.repository.create_remote(self.service.name, url=local_slug)

    def action_create(self, namespace, repository):
        with self.recorder.use_cassette(self._make_cassette_name()):
            self.service.connect()
            self.service.create(namespace, repository, add=True)
            #
            self.assert_repository_exists(namespace, repository)
            self.assert_added_remote_defaults()

    def action_create__no_add(self, namespace, repository):
        with self.recorder.use_cassette(self._make_cassette_name()):
            self.service.connect()
            self.service.create(namespace, repository, add=False)
            #
            self.assert_repository_exists(namespace, repository)
            self.assert_added_remote_defaults()

    def action_delete(self, repository, namespace=None):
        with self.recorder.use_cassette(self._make_cassette_name()):
            self.service.connect()
            if namespace:
                self.service.delete(user=namespace, repo=repository)
            else:
                self.service.delete(repo=repository)
            #
            if not namespace:
                namespace = self.service.user
            self.assert_repository_not_exists(namespace, repository)

    def action_add(self, namespace, repository, alone=False, name=None, tracking='master'):
        with self.recorder.use_cassette(self._make_cassette_name()):
            # init git in the repository's destination
            self.repository.init()
            self.service.connect()
            self.service.add(user=namespace, repo=repository, alone=alone, name=name, tracking=tracking)
            #
            if not tracking:
                if not alone and not name:
                    self.assert_added_remote_defaults()
                elif not alone and name:
                    self.assert_added_remote(name)
                    self.assert_added_remote('all')
                elif alone and not name:
                    self.assert_added_remote(self.service.name)
                elif alone and name:
                    self.assert_added_remote(name)
            else:
                if not alone and not name:
                    self.assert_added_remote_defaults()
                    self.assert_tracking_remote()
                elif not alone and name:
                    self.assert_added_remote(name)
                    self.assert_added_remote('all')
                    self.assert_tracking_remote(name)
                elif alone and not name:
                    self.assert_added_remote(self.service.name)
                    self.assert_tracking_remote(branch_name=tracking)
                elif alone and name:
                    self.assert_added_remote(name)
                    self.assert_tracking_remote(name, tracking)

    def action_list(self, namespace, _long=False):
        with self.recorder.use_cassette(self._make_cassette_name()):
            self.service.connect()
            self.service.list(namespace, _long=_long)

    def action_request_list(self, namespace, repository, rq_list_data=[]):
        with self.recorder.use_cassette(self._make_cassette_name()):
            self.service.connect()
            requests = list(self.service.request_list(user=namespace, repo=repository))
            for i, rq in enumerate(rq_list_data):
                assert requests[i] == rq

    def action_request_fetch(self, namespace, repository, request, pull=False, fail=False, remote_branch='pull', local_branch='requests'):
        local_slug = self.service.format_path(namespace=namespace, repository=repository, rw=False)
        with self.recorder.use_cassette(self._make_cassette_name()):
            with self.mockup_git(namespace, repository):
                self.set_mock_popen_commands([
                    ('git remote add all {}'.format(local_slug), b'', b'', 0),
                    ('git remote add {} {}'.format(self.service.name, local_slug), b'', b'', 0),
                    ('git version', b'git version 2.8.0', b'', 0),
                    ('git pull --progress -v {} master'.format(self.service.name), b'', '\n'.join([
                        'POST git-upload-pack (140 bytes)',
                        'remote: Counting objects: 8318, done.',
                        'remote: Compressing objects: 100% (3/3), done.',
                        'remote: Total 8318 (delta 0), reused 0 (delta 0), pack-reused 8315',
                        'Receiving objects: 100% (8318/8318), 3.59 MiB | 974.00 KiB/s, done.',
                        'Resolving deltas: 100% (5126/5126), done.',
                        'From {}:{}/{}'.format(self.service.fqdn, namespace, repository),
                        ' * branch            master     -> FETCH_HEAD',
                        ' * [new branch]      master     -> {1}/{0}'.format(request, local_branch)]).encode('utf-8'),
                    0),
                    ('git version', b'git version 2.8.0', b'', 0),
                    ('git fetch --progress -v {0} {2}/{1}/head:{3}/{1}'.format(
                            self.service.name,
                            request,
                            remote_branch,
                            local_branch), b'', '\n'.join([
                        'POST git-upload-pack (140 bytes)',
                        'remote: Counting objects: 8318, done.',
                        'remote: Compressing objects: 100% (3/3), done.',
                        'remote: Total 8318 (delta 0), reused 0 (delta 0), pack-reused 8315',
                        'Receiving objects: 100% (8318/8318), 3.59 MiB | 974.00 KiB/s, done.',
                        'Resolving deltas: 100% (5126/5126), done.',
                        'From {}:{}/{}'.format(self.service.fqdn, namespace, repository),
                        ' * [new branch]      master     -> request/{}'.format(request)]).encode('utf-8'),
                    0)
                ])
                self.service.connect()
                self.service.clone(namespace, repository, rw=False)
            if not fail:
                self.service.repository.create_remote('all', url=local_slug)
                self.service.repository.create_remote(self.service.name, url=local_slug)
            with self.mockup_git(namespace, repository):
                self.set_mock_popen_commands([
                    ('git version', b'git version 2.8.0', b'', 0),
                    ('git fetch --progress -v {0} {2}/{1}/head:{3}/{1}'.format(
                            self.service.name,
                            request,
                            remote_branch,
                            local_branch), b'', '\n'.join([
                        'POST git-upload-pack (140 bytes)',
                        'remote: Counting objects: 8318, done.',
                        'remote: Compressing objects: 100% (3/3), done.',
                        'remote: Total 8318 (delta 0), reused 0 (delta 0), pack-reused 8315',
                        'Receiving objects: 100% (8318/8318), 3.59 MiB | 974.00 KiB/s, done.',
                        'Resolving deltas: 100% (5126/5126), done.',
                        'From {}:{}/{}'.format(self.service.fqdn, namespace, repository),
                        ' * [new branch]      master     -> {1}/{0}'.format(request, local_branch)]).encode('utf-8'),
                    0)
                ])
                self.service.request_fetch(repository, namespace, request)

    def action_request_create(self,
            namespace, repository, branch,
            title, description, service,
            create_repository='test_create_requests',
            create_branch='pr-test'):
        '''
        Here we are testing the subcommand 'request create'.

        This test needs considerably more preparation than the other tests, because
        creating a pull request requires:

        * a repository with commits on both the service and in your workspace
        * a new branch with new commits that has been pushed to the service

        So that's what we do below:
            * create a test project on the service,
            * populate the temporary git repository with it
            * create a commit and push it to the service as master
            * create a branch in the workspace
            * create a commit and push it to the service as pr-test

        Then we test the feature:
            * using the branch, create a pull request and check that the pull request is there

        Finally, clean up the remote repository.

        All of this contextual work is only done when the cassette is being recorded.
        '''
        cassette_name = self._make_cassette_name()
        will_record = 'never' != self.recorder.config.default_cassette_options['record_mode'] \
                and not os.path.exists(os.path.join(self.recorder.config.cassette_library_dir, cassette_name+'.json'))

        @contextmanager
        def prepare_project_for_test():
            if will_record:
                self.service.connect()
                # let's create a project and add it to current repository
                self.service.create(namespace, create_repository, add=True)
                # make a modification, commit and push it
                with open(os.path.join(self.repository.working_dir, 'first_file'), 'w') as test:
                    test.write('he who makes a beast of himself gets rid of the pain of being a man. Dr Johnson')
                self.repository.git.add('first_file')
                self.repository.git.commit(message='First commit')
                self.repository.git.push(self.service.name, 'master')
                # create a new branch
                new_branch = self.repository.create_head(create_branch, 'HEAD')
                self.repository.head.reference = new_branch
                self.repository.head.reset(index=True, working_tree=True)
                # make a modification, commit and push it to that branch
                with open(os.path.join(self.repository.working_dir, 'second_file'), 'w') as test:
                    test.write('La meilleure façon de ne pas avancer est de suivre une idée fixe. J.Prévert')
                self.repository.git.add('second_file')
                self.repository.git.commit(message='Second commit')
                self.repository.git.push(service, create_branch)
            yield
            if will_record:
                self.service.delete(create_repository)

        #self.service.repository = self.repository
        with prepare_project_for_test():
            with self.recorder.use_cassette(cassette_name):
                self.service.connect()
                request = self.service.request_create(
                        namespace,
                        repository,
                        branch,
                        title,
                        description
                )
                return request

    def action_gist_list(self, gist=None, gist_list_data=[]):
        with self.recorder.use_cassette(self._make_cassette_name()):
            self.service.connect()
            if gist is None:
                gists = list(self.service.gist_list())
                for i, g in enumerate(gist_list_data):
                    assert gists[i] == g
            else:
                gist_files = list(self.service.gist_list(gist))
                for i, gf in enumerate(gist_list_data):
                    assert gist_files[i] == gf

    def action_gist_clone(self, gist):
        with self.mockup_git(None, None):
            self.set_mock_popen_commands([
                ('git version', b'git version 2.8.0', b'', 0),
                ('git remote add gist {}.git'.format(gist), b'', b'', 0),
                ('git pull --progress -v gist master', b'', b'\n'.join([
                    b'POST git-upload-pack (140 bytes)',
                    b'remote: Counting objects: 8318, done.',
                    b'remote: Compressing objects: 100% (3/3), done.',
                    b'remote: Total 8318 (delta 0), reused 0 (delta 0), pack-reused 8315',
                    b'Receiving objects: 100% (8318/8318), 3.59 MiB | 974.00 KiB/s, done.',
                    b'Resolving deltas: 100% (5126/5126), done.',
                    bytes('From {}'.format(gist), 'utf-8'),
                    b' * branch            master     -> FETCH_HEAD']),
                0),
            ])
            with self.recorder.use_cassette(self._make_cassette_name()):
                self.service.connect()
                self.service.gist_clone(gist)


    def action_gist_fetch(self, gist, gist_file=None):
        with self.recorder.use_cassette(self._make_cassette_name()):
            self.service.connect()
            content = self.service.gist_fetch(gist, gist_file)
            return content

    def action_gist_create(self, description, gist_files, secret):
        with self.recorder.use_cassette(self._make_cassette_name()):
            self.service.connect()
            content = self.service.gist_create(gist_files, description, secret)

    def action_gist_delete(self, gist):
        with self.recorder.use_cassette(self._make_cassette_name()):
            self.service.connect()
            content = self.service.gist_delete(gist)

    def action_open(self, namespace, repository):
        self.set_mock_popen_commands([
            ('xdg-open {}'.format(self.service.format_path(namespace=namespace, repository=repository)), b'', b'', 0),
            ('open {}'.format(self.service.format_path(namespace=namespace, repository=repository)), b'', b'', 0),
        ])
        with Replace('subprocess.Popen', self.Popen):
            self.service.open(user=namespace, repo=repository)
Example #33
class TestAllowlistRequirements(TestCase):
    """
    Tests for the bandersnatch filtering by requirements
    """
    def setUp(self) -> None:
        self.cwd = os.getcwd()
        self.tempdir = TemporaryDirectory()
        os.chdir(self.tempdir.name)

    def tearDown(self) -> None:
        if self.tempdir:
            assert self.cwd
            os.chdir(self.cwd)
            self.tempdir.cleanup()

    def test__plugin__loads__explicitly_enabled(self) -> None:
        mock_config("""\
[mirror]
storage-backend = filesystem
workers = 2

[plugins]
enabled =
    project_requirements_pinned
""")

        plugins = bandersnatch.filter.LoadedFilters().filter_release_plugins()
        names = [plugin.name for plugin in plugins]
        self.assertListEqual(names, ["project_requirements_pinned"])
        self.assertEqual(len(plugins), 1)

    def test__plugin__doesnt_load__explicitly__disabled(self) -> None:
        mock_config("""\
[mirror]
storage-backend = filesystem
workers = 2

[plugins]
enabled =
    allowlist_package
""")

        plugins = bandersnatch.filter.LoadedFilters().filter_release_plugins()
        names = [plugin.name for plugin in plugins]
        self.assertNotIn("project_requirements", names)

    def test__filter__matches__release(self) -> None:

        with open(Path(self.tempdir.name) / "requirements.txt", "w") as fh:
            fh.write("""\
#    This is needed for workshop 1
#
foo==1.2.0             # via -r requirements.in
""")

        mock_config(f"""\
[mirror]
storage-backend = filesystem
workers = 2

[plugins]
enabled =
    project_requirements
    project_requirements_pinned
[allowlist]
requirements_path = {self.tempdir.name}
requirements =
    requirements.txt
""")

        mirror = BandersnatchMirror(Path("."),
                                    Master(url="https://foo.bar.com"))
        pkg = Package("foo", 1)
        pkg._metadata = {
            "info": {
                "name": "foo"
            },
            "releases": {
                "1.2.0": {},
                "1.2.1": {}
            },
        }

        pkg.filter_all_releases(mirror.filters.filter_release_plugins())

        self.assertEqual({"1.2.0": {}}, pkg.releases)

    def test__filter__find_files(self) -> None:
        absolute_file_path = Path(self.tempdir.name) / "requirements.txt"
        with open(absolute_file_path, "w") as fh:
            fh.write("""\
#    This is needed for workshop 1
#
foo==1.2.0             # via -r requirements.in
""")

        mock_config(f"""\
[mirror]
storage-backend = filesystem
workers = 2

[plugins]
enabled =
    project_requirements
[allowlist]
requirements =
    {absolute_file_path}
""")

        mirror = BandersnatchMirror(Path("."),
                                    Master(url="https://foo.bar.com"))

        mirror.packages_to_sync = {
            "foo": "",
            "bar": "",
            "baz": "",
        }
        mirror._filter_packages()
        self.assertEqual({"foo": ""}, mirror.packages_to_sync)
Example #34
class TestSyncInotify(unittest.TestCase):
    def watch_events(self):
        '''Watch events until an IGNORED is received for the main watch, then
        return the events.'''
        events = []
        with self.inotify as inotify:
            for event in inotify:
                events.append(event)
                if Mask.IGNORED in event and event.watch is self.watch:
                    return events

    def gather_events(self, function):
        '''Run the function and then watch events until you can return the
        result.'''

        try:
            function()
        finally:
            self.inotify.rm_watch(self.watch)

        return self.watch_events()

    def setUp(self):
        self._dir = TemporaryDirectory()
        self.dir = Path(self._dir.name)
        self.inotify = Inotify()
        self.watch = self.inotify.add_watch(
            self.dir,
            Mask.ACCESS | Mask.MODIFY | Mask.ATTRIB | Mask.CLOSE_WRITE
            | Mask.CLOSE_NOWRITE | Mask.OPEN | Mask.MOVED_FROM | Mask.MOVED_TO
            | Mask.CREATE | Mask.DELETE | Mask.DELETE_SELF | Mask.MOVE_SELF)

    def tearDown(self):
        self._dir.cleanup()

    def test_diriterated(self):
        def test():
            list(self.dir.iterdir())

        events = self.gather_events(test)
        self.assertTrue(all(event.watch is self.watch for event in events))
        self.assertTrue(
            any(Mask.ISDIR | Mask.OPEN in event and event.path == self.dir
                for event in events))
        self.assertTrue(
            any(Mask.ISDIR | Mask.ACCESS in event and event.path == self.dir
                for event in events))
        self.assertTrue(
            any(Mask.ISDIR
                | Mask.CLOSE_NOWRITE in event and event.path == self.dir
                for event in events))
        self.assertTrue(
            any(Mask.IGNORED in event and event.path == self.dir
                for event in events))

    def test_foo_opened_and_closed(self):
        def test():
            with open(self.dir / 'foo', 'w') as file:
                pass
            with open(self.dir / 'foo', 'r') as file:
                pass

        events = self.gather_events(test)
        self.assertTrue(all(event.watch is self.watch for event in events))
        self.assertTrue(
            any(Mask.CREATE in event and event.path == self.dir / 'foo'
                for event in events))
        self.assertTrue(
            any(Mask.OPEN in event and event.path == self.dir / 'foo'
                for event in events))
        self.assertTrue(
            any(Mask.CLOSE_WRITE in event and event.path == self.dir / 'foo'
                for event in events))
        self.assertTrue(
            any(Mask.CLOSE_NOWRITE in event and event.path == self.dir / 'foo'
                for event in events))

    def test_foo_deleted(self):
        def test():
            with open(self.dir / 'foo', 'w') as file:
                pass

            (self.dir / 'foo').unlink()

        events = self.gather_events(test)
        self.assertTrue(all(event.watch is self.watch for event in events))
        self.assertTrue(
            any(Mask.DELETE in event and event.path == self.dir / 'foo'
                for event in events))

    def test_foo_write(self):
        def test():
            with open(self.dir / 'foo', 'w') as file:
                file.write('test')

        events = self.gather_events(test)
        self.assertTrue(all(event.watch is self.watch for event in events))
        self.assertTrue(
            any(Mask.CREATE in event and event.path == self.dir / 'foo'
                for event in events))
        self.assertTrue(
            any(Mask.OPEN in event and event.path == self.dir / 'foo'
                for event in events))
        self.assertTrue(
            any(Mask.MODIFY in event and event.path == self.dir / 'foo'
                for event in events))
        self.assertTrue(
            any(Mask.CLOSE_WRITE in event and event.path == self.dir / 'foo'
                for event in events))

    def test_foo_moved(self):
        def test():
            with open(self.dir / 'foo', 'w') as file:
                pass

            (self.dir / 'foo').rename(self.dir / 'bar')

        events = self.gather_events(test)
        self.assertTrue(all(event.watch is self.watch for event in events))
        self.assertTrue(
            any(Mask.MOVED_FROM in event and event.path == self.dir / 'foo'
                for event in events))
        self.assertTrue(
            any(Mask.MOVED_TO in event and event.path == self.dir / 'bar'
                for event in events))
        self.assertEqual(
            next(event.cookie for event in events if Mask.MOVED_FROM in event),
            next(event.cookie for event in events if Mask.MOVED_TO in event),
        )

    def test_foo_attrib(self):
        def test():
            with open(self.dir / 'foo', 'w') as file:
                pass

            (self.dir / 'foo').chmod(0o777)

        events = self.gather_events(test)
        self.assertTrue(all(event.watch is self.watch for event in events))
        self.assertTrue(
            any(Mask.ATTRIB in event and event.path == self.dir / 'foo'
                for event in events))

    def test_onlydir_error(self):
        with open(self.dir / 'foo', 'w'):
            pass

        # Will not raise error
        self.inotify.add_watch(self.dir / 'foo', Mask.ATTRIB)

        with self.assertRaises(InotifyError):
            self.inotify.add_watch(self.dir / 'foo',
                                   Mask.ATTRIB | Mask.ONLYDIR)

    def test_nonexist_error(self):
        with self.assertRaises(InotifyError):
            self.inotify.add_watch(self.dir / 'foo',
                                   Mask.ATTRIB | Mask.ONLYDIR)

        with self.assertRaises(InotifyError):
            self.inotify.add_watch(self.dir / 'foo', Mask.ATTRIB)

    def test_move_self(self):
        with open(self.dir / 'foo', 'w'):
            pass

        watch = self.inotify.add_watch(self.dir / 'foo', Mask.MOVE_SELF)

        def test():
            (self.dir / 'foo').rename(self.dir / 'bar')

        events = self.gather_events(test)
        self.assertTrue(
            any(Mask.MOVE_SELF in event and event.path == self.dir /
                'foo' and event.watch is watch for event in events))

    def test_delete_self(self):
        with open(self.dir / 'foo', 'w'):
            pass

        watch = self.inotify.add_watch(self.dir / 'foo', Mask.DELETE_SELF)

        def test():
            (self.dir / 'foo').unlink()

        events = self.gather_events(test)

        self.assertTrue(
            any(Mask.DELETE_SELF in event and event.path == self.dir /
                'foo' and event.watch is watch for event in events))
        self.assertTrue(
            any(Mask.IGNORED in event and event.path == self.dir /
                'foo' and event.watch is watch for event in events))
        self.assertTrue(
            any(Mask.IGNORED in event and event.path == self.dir
                for event in events))

    def test_oneshot(self):
        with open(self.dir / 'foo', 'w'):
            pass

        watch = self.inotify.add_watch(self.dir / 'foo',
                                       Mask.CREATE | Mask.OPEN | Mask.ONESHOT)

        def test():
            with open(self.dir / 'foo', 'r'):
                pass
            (self.dir / 'foo').unlink()

        events = self.gather_events(test)

        # We check for name is None because only the first event will have a watch value
        self.assertTrue(
            any(Mask.OPEN in event and event.name is None
                and event.path == self.dir / 'foo' and event.watch is watch
                for event in events))
        # The oneshot has already expired, so this should not exist
        self.assertFalse(
            any(Mask.DELETE in event and event.name is None
                for event in events))
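Outside of the test harness, the same Inotify/Mask calls exercised above can be strung into a small watcher. The sketch below reuses only those calls; the import line is an assumption about where this module gets the names.

from pathlib import Path

from asyncinotify import Inotify, Mask  # assumed import; match this module's own


def watch_for_new_files(directory):
    # Iterate events synchronously until the watched directory disappears.
    with Inotify() as inotify:
        inotify.add_watch(Path(directory), Mask.CREATE | Mask.DELETE_SELF)
        for event in inotify:
            if Mask.CREATE in event:
                print('created:', event.path)
            if Mask.DELETE_SELF in event or Mask.IGNORED in event:
                break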
Example #35
class BaseCSVLoggerTest:
    # pylint: disable=not-callable,no-member
    CSVLogger = None
    batch_size = 20
    lr = 1e-3
    num_epochs = 10

    def setUp(self):
        torch.manual_seed(42)
        self.pytorch_network = nn.Linear(1, 1)
        self.loss_function = nn.MSELoss()
        self.optimizer = torch.optim.SGD(self.pytorch_network.parameters(),
                                         lr=BaseCSVLoggerTest.lr)
        self.model = Model(self.pytorch_network, self.optimizer,
                           self.loss_function)
        self.temp_dir_obj = TemporaryDirectory()
        self.csv_filename = os.path.join(self.temp_dir_obj.name, 'my_log.csv')

    def tearDown(self):
        self.temp_dir_obj.cleanup()

    def test_logging(self):
        train_gen = some_data_generator(20)
        valid_gen = some_data_generator(20)
        logger = self.CSVLogger(self.csv_filename)
        history = self.model.fit_generator(train_gen,
                                           valid_gen,
                                           epochs=self.num_epochs,
                                           steps_per_epoch=5,
                                           callbacks=[logger])
        self._test_logging(history)

    def test_logging_with_batch_granularity(self):
        train_gen = some_data_generator(20)
        valid_gen = some_data_generator(20)
        logger = self.CSVLogger(self.csv_filename, batch_granularity=True)
        history = History()
        self.model.fit_generator(train_gen,
                                 valid_gen,
                                 epochs=self.num_epochs,
                                 steps_per_epoch=5,
                                 callbacks=[logger, history])
        self._test_logging(history.history)

    def test_logging_append(self):
        train_gen = some_data_generator(20)
        valid_gen = some_data_generator(20)
        logger = self.CSVLogger(self.csv_filename)
        history = self.model.fit_generator(train_gen,
                                           valid_gen,
                                           epochs=self.num_epochs,
                                           steps_per_epoch=5,
                                           callbacks=[logger])
        logger = self.CSVLogger(self.csv_filename, append=True)
        history2 = self.model.fit_generator(train_gen,
                                            valid_gen,
                                            epochs=20,
                                            steps_per_epoch=5,
                                            initial_epoch=self.num_epochs,
                                            callbacks=[logger])
        self._test_logging(history + history2)

    def test_logging_overwrite(self):
        train_gen = some_data_generator(20)
        valid_gen = some_data_generator(20)
        logger = self.CSVLogger(self.csv_filename)
        self.model.fit_generator(train_gen,
                                 valid_gen,
                                 epochs=self.num_epochs,
                                 steps_per_epoch=5,
                                 callbacks=[logger])
        logger = self.CSVLogger(self.csv_filename, append=False)
        history = self.model.fit_generator(train_gen,
                                           valid_gen,
                                           epochs=20,
                                           steps_per_epoch=5,
                                           initial_epoch=self.num_epochs,
                                           callbacks=[logger])
        self._test_logging(history)

    def _test_logging(self, history):
        with open(self.csv_filename) as csvfile:
            reader = csv.DictReader(csvfile)
            rows = []
            for row in reader:
                if row['epoch'] != '':
                    self.assertAlmostEqual(float(row['lr']),
                                           BaseCSVLoggerTest.lr)
                del row['lr']
                rows.append(row)
        self.assertEqual(len(rows), len(history))
        for row, hist_entry in zip(rows, history):
            row = {k: v for k, v in row.items() if v != ''}
            self.assertEqual(row.keys(), hist_entry.keys())
            for k in row.keys():
                if isinstance(hist_entry[k], float):
                    self.assertAlmostEqual(float(row[k]), hist_entry[k])
                else:
                    self.assertEqual(str(row[k]), str(hist_entry[k]))
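BaseCSVLoggerTest leaves the CSVLogger attribute unset on purpose; a concrete test case binds it and mixes in TestCase. A sketch of that wiring, where the right-hand CSVLogger is assumed to be the logger class imported at module level by the test file:

class CSVLoggerTest(BaseCSVLoggerTest, TestCase):
    # Bind the logger under test; the module-level CSVLogger import is an
    # assumption about the library being tested.
    CSVLogger = CSVLogger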
Example #36
class CacheTests(TestCase):

    def get_mock_league(self, **kwargs):
        league = mock.Mock(season=2019, league_id=9999)
        league.configure_mock(**kwargs)
        return league

    def setUp(self):
        self.tmp = TemporaryDirectory()
        self.mock_league = self.get_mock_league()
        self.data = dict(zip("abcdef", range(6)))

    def test_base_cache(self):
        cache = Cache()
        with self.assertRaises(NotImplementedError):
            cache.load()
        with self.assertRaises(NotImplementedError):
            cache.save(None)
        cache.set_league(self.mock_league)
        self.assertIs(cache.league, self.mock_league)

    def test_cache_load(self):
        cache = LocalCache(self.tmp.name)
        cache.set_league(self.mock_league)
        wo_sp_name = os.path.join(self.tmp.name, "2019_9999.json")
        w_sp_name = os.path.join(self.tmp.name, "2019_9999_sp01.json")
        # cache miss
        self.assertIsNone(cache.load())
        self.assertIsNone(cache.load(1))
        # write file so cache hits
        with open(wo_sp_name, "w") as f:
            json.dump(self.data, f)
        data = cache.load()
        self.assertEqual(data, self.data)
        with open(w_sp_name, "w") as f:
            json.dump(self.data, f)
        data = cache.load(1)
        self.assertEqual(data, self.data)
        # ignore hit on purpose
        cache = LocalCache(self.tmp.name, ignore_cache=True)
        cache.set_league(self.mock_league)
        self.assertTrue(os.path.exists(wo_sp_name))
        self.assertIsNone(cache.load())

    def test_cache_save(self):
        cache = LocalCache(self.tmp.name)
        cache.set_league(self.mock_league)
        # save data with no scoring period
        expected_fname = os.path.join(self.tmp.name, "2019_9999.json")
        cache.save(self.data)
        self.assertTrue(os.path.exists(expected_fname))
        with open(expected_fname) as f:
            self.assertEqual(self.data, json.load(f))
        # save data with scoring period
        expected_fname = os.path.join(self.tmp.name, "2019_9999_sp14.json")
        cache.save(self.data, 14)
        self.assertTrue(os.path.exists(expected_fname))
        with open(expected_fname) as f:
            self.assertEqual(self.data, json.load(f))

    def test_invalid_cache(self):
        not_real_dir = "/tmp/laskdjflaskdfla"
        self.assertFalse(os.path.exists(not_real_dir))
        with self.assertRaises(ValueError):
            cache = LocalCache(not_real_dir)

    def test_cache_decorator(self):
        # decorated function to test
        @cache_operation
        def decorated(league):
            return 42

        # when first arg has no cache attr, `decorated` should fire
        self.assertEqual(decorated(None), 42)

        # setup cache and league
        cache = LocalCache(self.tmp.name)
        league = self.get_mock_league(cache=cache) # league needs cache attr
        cache.set_league(league)

        # when first arg has cache that misses, `decorated` should fire
        self.assertEqual(decorated(league), 42)

        # write file so cache hits
        wo_sp_name = os.path.join(self.tmp.name, "2019_9999.json")
        with open(wo_sp_name, "w") as f:
            json.dump(self.data, f)

        # when first arg has cache that hits, expect that data
        self.assertEqual(decorated(league), self.data)


    def tearDown(self):
        self.tmp.cleanup()
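test_cache_decorator above pins down the contract of cache_operation: pass the call through when the first argument has no cache, return cached data on a hit, and fall back to the wrapped function on a miss. A rough sketch of a decorator with that behaviour (not the package's actual implementation):

import functools


def cache_operation_sketch(func):
    @functools.wraps(func)
    def wrapper(league, *args, **kwargs):
        cache = getattr(league, "cache", None)
        if cache is None:
            # No cache attached: just run the wrapped function.
            return func(league, *args, **kwargs)
        cached = cache.load(*args)
        if cached is not None:
            return cached
        result = func(league, *args, **kwargs)
        cache.save(result, *args)
        return result
    return wrapper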
Example #37
class TestMainPipeline(unittest.TestCase):

    def setUp(self):
        """Setup the tests."""
        # Creating the temporary directory
        self.output_dir = TemporaryDirectory(prefix="genipe_test_")

    def tearDown(self):
        """Finishes the test."""
        # Deleting the output directory
        self.output_dir.cleanup()

    def test_file_sorter(self):
        """Tests the 'file_sorter' function."""
        filenames = [
            "chr1.1000_100000.impute2",
            "chr2.10000_1002300.impute2",
            "chr1.1_100.impute2",
            "/some/path/chr1.1_10.impute2",
            "chr25_1.3_40.impute2",
            "chr23.100_400.impute2",
            "some/path/to_file/chr25_2.1000_1500.impute2.gz",
            "chr25_2.1600_1650.impute2.gz",
            "chr1.100000_2000000.impute2.some_extension",
        ]
        expected_filenames = [
            "/some/path/chr1.1_10.impute2",
            "chr1.1_100.impute2",
            "chr1.1000_100000.impute2",
            "chr1.100000_2000000.impute2.some_extension",
            "chr2.10000_1002300.impute2",
            "chr23.100_400.impute2",
            "chr25_1.3_40.impute2",
            "some/path/to_file/chr25_2.1000_1500.impute2.gz",
            "chr25_2.1600_1650.impute2.gz",
        ]
        expected_results = [(1, 1000, 100000), (2, 10000, 1002300),
                            (1, 1, 100), (1, 1, 10), (25, 3, 40),
                            (23, 100, 400), (25, 1000, 1500),
                            (25, 1600, 1650),
                            (1, 100000, 2000000)]

        # Trying the function
        for filename, expected in zip(filenames, expected_results):
            self.assertEqual(expected, cli.file_sorter(filename))

        # Trying the sort function
        filenames.sort(key=cli.file_sorter)
        self.assertEqual(filenames, expected_filenames)

    def test_get_chromosome_length(self):
        """Tests the 'get_chromosome_length' function."""
        # The expected chromosome
        expected_chrom = {3, 6, 9, 23, 25}
        expected_length = {}

        # Writing some legend file for different chromosome
        legend_template = os.path.join(self.output_dir.name,
                                       "chr{chrom}.legend")
        legend_chr23 = os.path.join(self.output_dir.name,
                                    "chr23_nonPAR.legend")
        legend_par1 = os.path.join(self.output_dir.name, "chr23_PAR1.legend")
        legend_par2 = os.path.join(self.output_dir.name, "chr23_PAR2.legend")
        for chrom in expected_chrom:
            if chrom == 23:
                # Getting the positions and expected length
                positions = sorted([randint(5000, 100000) for i in range(100)])
                expected_length[chrom] = (min(positions), max(positions))

                # Writing the positions to file
                with open(legend_chr23, "w") as o_file:
                    print("id", "position", file=o_file)
                    for i, position in enumerate(positions):
                        print("marker_{}".format(i+1), position, file=o_file)

                # Continuing to next chromosome
                continue

            if chrom == 25:
                # Getting the positions and expected length
                positions = sorted([randint(1, 4999) for i in range(100)])
                expected_length[chrom] = [max(positions)]

                # Writing the positions to file
                with open(legend_par1, "w") as o_file:
                    print("id", "position", file=o_file)
                    for i, position in enumerate(positions):
                        print("marker_{}".format(i+1), position, file=o_file)

                # Getting the positions and expected length
                positions = sorted([
                    randint(100000, 200000) for i in range(100)
                ])
                expected_length[chrom].extend([min(positions), max(positions)])
                expected_length[chrom] = tuple(expected_length[chrom])

                # Writing the positions to file
                with open(legend_par2, "w") as o_file:
                    print("id", "position", file=o_file)
                    for i, position in enumerate(positions):
                        print("marker_{}".format(i+1), position, file=o_file)

                # Continuing to next chromosome
                continue

            # Getting the position and saving the expected length
            positions = sorted([randint(1, 2000000) for i in range(1000)])
            expected_length[chrom] = max(positions)

            # Saving the file
            with open(legend_template.format(chrom=chrom), "w") as o_file:
                print("id", "position", file=o_file)
                for i, position in enumerate(positions):
                    print("marker_{}".format(i+1), position, file=o_file)

        # Getting the chromosome length
        chrom_length = cli.get_chromosome_length(
            required_chrom=expected_chrom,
            legend=legend_template,
            legend_chr23=legend_chr23,
            legend_par1=legend_par1,
            legend_par2=legend_par2,
            out_dir=self.output_dir.name,
        )

        # Checking the expected length
        self.assertEqual(expected_length, chrom_length)

        # Checking the file was created
        self.assertTrue(os.path.isfile(os.path.join(self.output_dir.name,
                                                    "chromosome_lengths.txt")))

        # Tests that we correctly read the file
        expected_chrom = {6: expected_length[6], 9: expected_length[9],
                          23: expected_length[23], 25: expected_length[25]}

        # Writing the file
        chrom_filename = os.path.join(self.output_dir.name,
                                      "chromosome_lengths.txt")
        with open(chrom_filename, "w") as o_file:
            for chrom, length in expected_chrom.items():
                if (chrom == 23) or (chrom == 25):
                    print(chrom, *length, sep="\t", file=o_file)
                else:
                    print(chrom, length, sep="\t", file=o_file)

        # Comparing what we got
        chrom_length = cli.get_chromosome_length(
            required_chrom=expected_chrom.keys(),
            legend=legend_template,
            legend_chr23=legend_chr23,
            legend_par1=legend_par1,
            legend_par2=legend_par2,
            out_dir=self.output_dir.name,
        )
        self.assertEqual(expected_chrom, chrom_length)

        # Removing some autosomes from the file
        del expected_chrom[9]
        del expected_chrom[23]
        del expected_chrom[25]
        with open(chrom_filename, "w") as o_file:
            for chrom, length in expected_chrom.items():
                if (chrom == 23) or (chrom == 25):
                    print(chrom, *length, sep="\t", file=o_file)
                else:
                    print(chrom, length, sep="\t", file=o_file)
        expected_chrom[9] = expected_length[9]
        expected_chrom[23] = expected_length[23]
        expected_chrom[25] = expected_length[25]

        # Tests that a warning is logged if there is a missing chromosome
        with self.assertLogs(level="WARNING") as cm:
            chrom_length = cli.get_chromosome_length(
                required_chrom=sorted(expected_chrom.keys()),
                legend=legend_template,
                legend_chr23=legend_chr23,
                legend_par1=legend_par1,
                legend_par2=legend_par2,
                out_dir=self.output_dir.name,
            )
        log_m = [
            "WARNING:root:missing length for chromosome 9",
            "WARNING:root:missing length for chromosome 23",
            "WARNING:root:missing length for chromosome 25",
        ]
        self.assertEqual(log_m, cm.output)

        # Testing the content
        self.assertEqual(expected_chrom, chrom_length)

    @unittest.skipIf(not cli.HAS_PYFAIDX,
                     "optional requirement (pyfaidx) not satisfied")
    def test_get_chrom_encoding(self):
        """Tests the 'get_chrom_encoding' function."""
        # Creating the reference file (fasta file) and index (using samtools)
        fasta_content = [[">{}".format(i), "ACGT"] for i in range(1, 25)]
        fasta_content.append([">26", "ACGT"])
        fasta_content.append([">Unaligned", "ACGT"])
        index_content = [
            ["1", "4", "3", "4", "5"],
            ["2", "4", "11", "4", "5"],
            ["3", "4", "19", "4", "5"],
            ["4", "4", "27", "4", "5"],
            ["5", "4", "35", "4", "5"],
            ["6", "4", "43", "4", "5"],
            ["7", "4", "51", "4", "5"],
            ["8", "4", "59", "4", "5"],
            ["9", "4", "67", "4", "5"],
            ["10", "4", "76", "4", "5"],
            ["11", "4", "85", "4", "5"],
            ["12", "4", "94", "4", "5"],
            ["13", "4", "103", "4", "5"],
            ["14", "4", "112", "4", "5"],
            ["15", "4", "121", "4", "5"],
            ["16", "4", "130", "4", "5"],
            ["17", "4", "139", "4", "5"],
            ["18", "4", "148", "4", "5"],
            ["19", "4", "157", "4", "5"],
            ["20", "4", "166", "4", "5"],
            ["21", "4", "175", "4", "5"],
            ["22", "4", "184", "4", "5"],
            ["23", "4", "193", "4", "5"],
            ["24", "4", "202", "4", "5"],
            ["26", "4", "210", "4", "5"],
            ["Unaligned", "4", "226", "4", "5"],
        ]
        reference_filename = os.path.join(self.output_dir.name, "ref.fasta")
        with open(reference_filename, "w") as o_file:
            for chromosome_fasta in fasta_content:
                print(*chromosome_fasta, sep="\n", file=o_file)
        with open(reference_filename + ".fai", "w") as o_file:
            for line in index_content:
                print(*line, sep="\t", file=o_file)

        # Reading the reference using pyfaidx
        reference = pyfaidx.Fasta(reference_filename, as_raw=True)

        # The expected result
        expected = {str(i): str(i) for i in range(1, 25)}
        expected["26"] = "26"

        # The observed result
        observed = cli.get_chrom_encoding(reference)
        self.assertEqual(expected, observed)
        reference.close()

        # Replacing 23 and 24 to X and Y
        fasta_content[22][0] = ">X"
        fasta_content[23][0] = ">Y"
        index_content[22][0] = "X"
        index_content[23][0] = "Y"

        # Writing to file
        with open(reference_filename, "w") as o_file:
            for chromosome_fasta in fasta_content:
                print(*chromosome_fasta, sep="\n", file=o_file)
        with open(reference_filename + ".fai", "w") as o_file:
            for line in index_content:
                print(*line, sep="\t", file=o_file)

        # Reading the reference using pyfaidx
        reference = pyfaidx.Fasta(reference_filename, as_raw=True)

        # The expected result
        expected["23"] = "X"
        expected["24"] = "Y"

        # The observed result
        observed = cli.get_chrom_encoding(reference)
        self.assertEqual(expected, observed)
        reference.close()

        # Adding chr everywhere
        for i in range(len(fasta_content)):
            fasta_content[i][0] = ">chr" + fasta_content[i][0][1:]
            index_content[i][0] = "chr" + index_content[i][0]

        # Writing to file
        with open(reference_filename, "w") as o_file:
            for chromosome_fasta in fasta_content:
                print(*chromosome_fasta, sep="\n", file=o_file)
        with open(reference_filename + ".fai", "w") as o_file:
            for line in index_content:
                print(*line, sep="\t", file=o_file)

        # Reading the reference using pyfaidx
        reference = pyfaidx.Fasta(reference_filename, as_raw=True)

        # The expected result
        expected = {str(i): "chr{}".format(i) for i in range(1, 23)}
        expected["23"] = "chrX"
        expected["24"] = "chrY"
        expected["26"] = "chr26"

        # The observed result
        observed = cli.get_chrom_encoding(reference)
        self.assertEqual(expected, observed)
        reference.close()

        # Replacing 23 and 24 to X and Y
        fasta_content[22][0] = ">chr23"
        fasta_content[23][0] = ">chr24"
        index_content[22][0] = "chr23"
        index_content[23][0] = "chr24"

        # Writing to file
        with open(reference_filename, "w") as o_file:
            for chromosome_fasta in fasta_content:
                print(*chromosome_fasta, sep="\n", file=o_file)
        with open(reference_filename + ".fai", "w") as o_file:
            for line in index_content:
                print(*line, sep="\t", file=o_file)

        # Reading the reference using pyfaidx
        reference = pyfaidx.Fasta(reference_filename, as_raw=True)

        # The expected result
        expected["23"] = "chr23"
        expected["24"] = "chr24"

        # The observed result
        observed = cli.get_chrom_encoding(reference)
        self.assertEqual(expected, observed)
        reference.close()

        # Finally, removing from chromosome 18 to trigger warnings
        fasta_content = fasta_content[:18]
        index_content = index_content[:18]

        # Writing to file
        with open(reference_filename, "w") as o_file:
            for chromosome_fasta in fasta_content:
                print(*chromosome_fasta, sep="\n", file=o_file)
        with open(reference_filename + ".fai", "w") as o_file:
            for line in index_content:
                print(*line, sep="\t", file=o_file)

        # Reading the reference using pyfaidx
        reference = pyfaidx.Fasta(reference_filename, as_raw=True)

        # The observed result
        with self.assertLogs(level="WARNING") as cm:
            cli.get_chrom_encoding(reference)
        log_m = [
            "WARNING:root:{}: chromosome not in reference".format(i)
            for i in range(19, 27) if i != 25
        ]
        self.assertEqual(log_m, cm.output)
        reference.close()

    @unittest.skipIf(not cli.HAS_PYFAIDX,
                     "optional requirement (pyfaidx) not satisfied")
    def test_is_reversed(self):
        """Tests the 'is_reversed' function."""
        # Creating the reference file (fasta file) and index (using samtools)
        fasta_content = (
            ">1\n"
            "ACGT\n"
            ">2\n"
            "ACGT\n"
            ">3\n"
            "acgt\n"
        )
        index_content = (
            "1\t4\t3\t4\t5\n"
            "2\t4\t11\t4\t5\n"
            "3\t4\t19\t4\t5\n"
        )
        reference_filename = os.path.join(self.output_dir.name, "ref.fasta")
        with open(reference_filename, "w") as o_file:
            o_file.write(fasta_content)
        with open(reference_filename + ".fai", "w") as o_file:
            o_file.write(index_content)

        # Reading the reference using pyfaidx
        reference = pyfaidx.Fasta(reference_filename, as_raw=True)

        # The chromosome encoding
        encoding = {"1": "1", "2": "2", "3": "3"}

        # Testing invalid allele (should return False)
        self.assertFalse(cli.is_reversed(
            "1", 1, "I", "D", reference, encoding),
        )
        self.assertFalse(cli.is_reversed(
            "1", 1, "Z", "A", reference, encoding),
        )
        self.assertFalse(cli.is_reversed(
            "1", 1, "A", "K", reference, encoding),
        )

        # Testing invalid chromosome (should return False)
        self.assertFalse(cli.is_reversed(
            "23", 1, "A", "C", reference, encoding),
        )

        # Testing invalid position (should return False)
        self.assertFalse(cli.is_reversed(
            "1", 100, "A", "C", reference, encoding),
        )

        # Testing valid input, without strand problem (should return False)
        self.assertFalse(cli.is_reversed(
            "1", 3, "G", "T", reference, encoding),
        )
        self.assertFalse(cli.is_reversed(
            "2", 4, "G", "T", reference, encoding),
        )
        self.assertFalse(cli.is_reversed(
            "3", 2, "g", "c", reference, encoding),
        )

        # Testing valid input, but strand problem (should return True)
        self.assertTrue(cli.is_reversed("1", 1, "T", "G", reference, encoding))
        self.assertTrue(cli.is_reversed("2", 2, "t", "g", reference, encoding))
        self.assertTrue(cli.is_reversed("3", 3, "T", "C", reference, encoding))
        self.assertTrue(cli.is_reversed("1", 4, "A", "C", reference, encoding))

        # Closing the reference
        reference.close()

    @unittest.skip("Test not implemented")
    def test_read_preamble(self):
        """Tests the 'read_preamble' function."""
        self.fail("Test not implemented")

    @unittest.skip("Test not implemented")
    def test_get_cross_validation_results(self):
        """Tests the 'get_cross_validation_results' function."""
        self.fail("Test not implemented")

    @unittest.skip("Test not implemented")
    def test_gather_imputation_stats(self):
        """Tests the 'gather_imputation_stats' function."""
        self.fail("Test not implemented")

    def test_gather_maf_stats(self):
        """Tests the 'gather_maf_stats' function."""
        # Creating one file per chromosome with two markers in each
        header = ["name", "maf"]
        content = [
            ["marker_1", "0.0"],
            ["marker_2", "0.2"],
            ["marker_3", "NA"],
            ["marker_4", "0.3"],
            ["marker_5", "9.4e-05"],
            ["marker_6", "0.04"],
            ["marker_7", "0.01"],
            ["marker_8", "0.003"],
            ["marker_9", "0.015"],
            ["marker_10", "0.020"],
            ["marker_11", "0.005"],
            ["marker_12", "0.004"],
            ["marker_13", "0.05"],
            ["marker_14", "0.055"],
            ["marker_15", "0.001"],
            ["marker_16", "0.004"],
            ["marker_17", "0.056"],
            ["marker_18", "0.0123"],
            ["marker_19", "0.005"],
            ["marker_20", "0.012"],
            ["marker_21", "0.001"],
            ["marker_22", "0.0316"],
            ["marker_23", "0.432"],
            ["marker_24", "0.03423"],
            ["marker_25", "0.00514"],
            ["marker_26", "0.01004"],
            ["marker_27", "0.011"],
            ["marker_28", "0.051"],
            ["marker_29", "0.048"],
            ["marker_30", "0.0484"],
            ["marker_31", "0.0871"],
            ["marker_32", "0.5"],
            ["marker_33", "0.006"],
            ["marker_34", "0.06"],
            ["marker_35", "0.08"],
            ["marker_36", "0.0784"],
            ["marker_37", "0.0984"],
            ["marker_38", "0.19444"],
            ["marker_39", "1.5e-04"],
            ["marker_40", "NA"],
            ["marker_41", "4.87e-07"],
            ["marker_42", "5.4e-08"],
            ["marker_43", "0.394"],
            ["marker_44", "0.004"],
        ]
        good_sites = ["marker_1", "marker_2", "marker_5", "marker_6",
                      "marker_7", "marker_8", "marker_9", "marker_10",
                      "marker_11", "marker_12", "marker_13", "marker_14",
                      "marker_15", "marker_16", "marker_17", "marker_18",
                      "marker_19", "marker_20", "marker_21", "marker_22",
                      "marker_23", "marker_24", "marker_25", "marker_26",
                      "marker_27", "marker_28", "marker_29", "marker_30",
                      "marker_31", "marker_32", "marker_33", "marker_34",
                      "marker_35", "marker_36", "marker_37", "marker_38",
                      "marker_39", "marker_41", "marker_43", "marker_44"]

        # The PDF generated
        frequency_barh = ""
        if cli.HAS_MATPLOTLIB:
            frequency_barh = os.path.join(self.output_dir.name,
                                          "frequency_barh.pdf")
        # The expected results
        nb_sites = len(good_sites)
        expected_results = {
            "nb_marker_with_maf":   str(nb_sites),
            "nb_maf_geq_01":        "26",
            "pct_maf_geq_01":       "{:.1f}".format(26 / nb_sites * 100),
            "nb_maf_geq_05":        "14",
            "pct_maf_geq_05":       "{:.1f}".format(14 / nb_sites * 100),
            "nb_maf_lt_05":         "26",
            "pct_maf_lt_05":        "{:.1f}".format(26 / nb_sites * 100),
            "nb_maf_lt_01":         "14",
            "pct_maf_lt_01":        "{:.1f}".format(14 / nb_sites * 100),
            "nb_maf_geq_01_lt_05":  "12",
            "pct_maf_geq_01_lt_05": "{:.1f}".format(12 / nb_sites * 100),
            "nb_maf_nan":           "0",
            "frequency_barh":       frequency_barh,
        }

        # Creating the files for the test
        filename_template = os.path.join(self.output_dir.name, "chr{chrom}",
                                         "final_impute2",
                                         "chr{chrom}.imputed.{suffix}")
        for chrom in autosomes:
            # Getting the name of the file
            maf_filename = filename_template.format(chrom=chrom, suffix="maf")
            good_sites_filename = filename_template.format(chrom=chrom,
                                                           suffix="good_sites")

            # Getting the directory and create it
            dirname = os.path.dirname(maf_filename)
            if not os.path.isdir(dirname):
                os.makedirs(dirname)
            self.assertTrue(os.path.isdir(dirname))

            # Creating the content of the maf file
            with open(maf_filename, "w") as o_file:
                print(*header, sep="\t", file=o_file)
                for i in range(2):
                    print(*content.pop(), sep="\t", file=o_file)

            # Creating the content of the good sites file
            with open(good_sites_filename, "w") as o_file:
                print(*good_sites, sep="\n", file=o_file)

            # Checking the files were created
            self.assertTrue(os.path.isfile(maf_filename))
            self.assertTrue(os.path.isfile(good_sites_filename))

        # Checking we passed all the content (MAF)
        self.assertEqual(0, len(content))

        # Executing the command (getting the observed data)
        observed = cli.gather_maf_stats(
            required_chrom=autosomes,
            o_dir=self.output_dir.name,
        )

        # Checking the observed results
        self.assertEqual(len(expected_results), len(observed))
        for expected_key, expected_value in expected_results.items():
            self.assertTrue(expected_key in observed)
            self.assertEqual(expected_value, observed[expected_key])

        # If matplotlib is installed, checking we have a figure (and not
        # otherwise)
        if cli.HAS_MATPLOTLIB:
            self.assertTrue(os.path.isfile(frequency_barh))
        else:
            self.assertFalse(os.path.isfile(frequency_barh))

        # Testing an invalid entry
        changed_filename = filename_template.format(chrom=1, suffix="maf")
        with open(changed_filename, "w") as o_file:
            print(*header, sep="\t", file=o_file)
            print("marker_1", "0.6", sep="\t", file=o_file)

        # This should raise an exception
        with self.assertRaises(GenipeError) as cm:
            cli.gather_maf_stats(
                required_chrom=autosomes,
                o_dir=self.output_dir.name,
            )
        self.assertEqual("{}: {}: invalid MAF".format("marker_1",
                                                      round(0.6, 3)),
                         str(cm.exception))

        # Testing an invalid entry
        changed_filename = filename_template.format(chrom=1, suffix="maf")
        with open(changed_filename, "w") as o_file:
            print(*header, sep="\t", file=o_file)
            print("marker_1", "-0.01", sep="\t", file=o_file)

        # This should raise an exception
        with self.assertRaises(GenipeError) as cm:
            cli.gather_maf_stats(
                required_chrom=autosomes,
                o_dir=self.output_dir.name,
            )
        self.assertEqual("{}: {}: invalid MAF".format("marker_1",
                                                      round(-0.01, 3)),
                         str(cm.exception))

        # Testing a good site with NA MAF
        changed_filename = filename_template.format(chrom=1, suffix="maf")
        with open(changed_filename, "w") as o_file:
            print(*header, sep="\t", file=o_file)
            print("marker_1", "NA", sep="\t", file=o_file)

        # This should issue a warning
        with self.assertLogs(level="WARNING") as cm:
            cli.gather_maf_stats(
                required_chrom=autosomes,
                o_dir=self.output_dir.name,
            )
        log_m = "WARNING:root:chr1: good sites with invalid MAF (NaN)"
        self.assertEqual(1, len(cm.output))
        self.assertEqual(log_m, cm.output[0])

        # Clearing the good sites file to see if we have a warning
        for chrom in autosomes:
            filename = filename_template.format(chrom=chrom,
                                                suffix="good_sites")
            with open(filename, "w") as o_file:
                pass

        # This should issue a warning
        with self.assertLogs(level="WARNING") as cm:
            cli.gather_maf_stats(
                required_chrom=autosomes,
                o_dir=self.output_dir.name,
            )
        log_m = ("WARNING:root:There were no marker with MAF (something went "
                 "wrong)")
        self.assertEqual(1, len(cm.output))
        self.assertEqual(log_m, cm.output[0])

        # Deleting a good sites file, and checking we have an error
        removed_filename = filename_template.format(chrom=1,
                                                    suffix="good_sites")
        os.remove(removed_filename)
        self.assertFalse(os.path.isfile(removed_filename))

        # This should raise an exception
        with self.assertRaises(GenipeError) as cm:
            cli.gather_maf_stats(
                required_chrom=autosomes,
                o_dir=self.output_dir.name,
            )
        self.assertEqual("{}: no such file".format(removed_filename),
                         str(cm.exception))

        # Deleting a MAF sites file, and checking we have an error
        removed_filename = filename_template.format(chrom=1, suffix="maf")
        os.remove(removed_filename)
        self.assertFalse(os.path.isfile(removed_filename))

        # This should raise an exception
        with self.assertRaises(GenipeError) as cm:
            cli.gather_maf_stats(
                required_chrom=autosomes,
                o_dir=self.output_dir.name,
            )
        self.assertEqual("{}: no such file".format(removed_filename),
                         str(cm.exception))

    @unittest.skip("Test not implemented")
    def test_gather_execution_time(self):
        """Tests the 'gather_execution_time' function."""
        self.fail("Test not implemented")
class DocumentDatabase:
    def __init__(self, reduce_memory=False):
        if reduce_memory:
            self.temp_dir = TemporaryDirectory()
            self.working_dir = Path(self.temp_dir.name)
            self.document_shelf_filepath = self.working_dir / 'shelf.db'
            self.document_shelf = shelve.open(str(
                self.document_shelf_filepath),
                                              flag='n',
                                              protocol=-1)
            self.documents = None
        else:
            self.documents = []
            self.document_shelf = None
            self.document_shelf_filepath = None
            self.temp_dir = None
        self.doc_lengths = []
        self.doc_cumsum = None
        self.cumsum_max = None
        self.reduce_memory = reduce_memory

    def add_document(self, document):
        if not document:
            return
        if self.reduce_memory:
            current_idx = len(self.doc_lengths)
            self.document_shelf[str(current_idx)] = document
        else:
            self.documents.append(document)
        self.doc_lengths.append(len(document))

    def _precalculate_doc_weights(self):
        self.doc_cumsum = np.cumsum(self.doc_lengths)
        self.cumsum_max = self.doc_cumsum[-1]

    def sample_doc(self, current_idx, sentence_weighted=True):
        # Uses the current iteration counter to ensure we don't sample the same doc twice
        if sentence_weighted:
            # With sentence weighting, we sample docs proportionally to their sentence length
            if self.doc_cumsum is None or len(self.doc_cumsum) != len(
                    self.doc_lengths):
                self._precalculate_doc_weights()
            rand_start = self.doc_cumsum[current_idx]
            rand_end = rand_start + self.cumsum_max - self.doc_lengths[
                current_idx]
            sentence_index = random.randrange(rand_start,
                                              rand_end) % self.cumsum_max
            sampled_doc_index = np.searchsorted(self.doc_cumsum,
                                                sentence_index,
                                                side='right')
        else:
            # If we don't use sentence weighting, then every doc has an equal chance to be chosen
            sampled_doc_index = (current_idx + random.randrange(
                1, len(self.doc_lengths))) % len(self.doc_lengths)
        assert sampled_doc_index != current_idx
        if self.reduce_memory:
            return self.document_shelf[str(sampled_doc_index)]
        else:
            return self.documents[sampled_doc_index]

    def __len__(self):
        return len(self.doc_lengths)

    def __getitem__(self, item):
        if self.reduce_memory:
            return self.document_shelf[str(item)]
        else:
            return self.documents[item]

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, traceback):
        if self.document_shelf is not None:
            self.document_shelf.close()
        if self.temp_dir is not None:
            self.temp_dir.cleanup()
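A short usage sketch of the DocumentDatabase defined above (the documents themselves are made up):

with DocumentDatabase(reduce_memory=False) as docs:
    docs.add_document(["first sentence", "second sentence"])
    docs.add_document(["another", "three", "sentences"])
    docs.add_document(["single sentence doc"])

    print(len(docs))      # 3
    print(docs[0])        # ['first sentence', 'second sentence']

    # sample_doc never returns the document at current_idx; with
    # sentence_weighted=True, longer documents are drawn more often.
    other = docs.sample_doc(current_idx=0)
    assert other != docs[0]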
Example #39
class HelloWorldTest(unittest.TestCase):
    def setUp(self):
        self.tempdir = TemporaryDirectory()

        config = {
            'master': ['+123'],
            'plugins': ['pingpong'],
            'testing_plugins': ['pingponglocktest'],
            'startup_notification': True,
        }
        configfile = Path.joinpath(Path(self.tempdir.name), 'config.yaml')
        yaml.dump(config, configfile.open('w'))

        self.mocker = Mocker()
        self.mocker.start()

        self.bot_popen = Popen(
            ['signal-bot', '--data-dir', self.tempdir.name, '--mocker'])
        # Wait for startup notification
        self.mocker.wait_for_n_messages(n=1)

    def tearDown(self):
        self.bot_popen.terminate()
        self.mocker.stop()
        self.tempdir.cleanup()

    def _assert_expected_messages(self, expect_messages):
        self.assertEqual([['Always at your service! ✔', [], ['+123']]] +
                         expect_messages,
                         [have[1:] for have in self.mocker.fromsignalbot])

    def test_master(self):
        self.mocker.messageSignalbot('+000', None, '//enable pingpong', [])
        self.mocker.messageSignalbot('+000', None, 'ping', [])
        self.mocker.messageSignalbot('+123', None, '//enable pingpong', [])
        self.mocker.messageSignalbot('+123', None, 'ping', [])
        self.mocker.messageSignalbot('+123', None, '//disable pingpong', [])
        self.mocker.messageSignalbot('+123', None, 'ping', [])
        self.mocker.wait_for_n_messages(n=5)
        expect_messages = [['You are not my master. ❌', [], ['+000']],
                           ['Plugin pingpong enabled. ✔', [], ['+123']],
                           ['pong', [], ['+123']],
                           ['Plugin pingpong disabled. ✔', [], ['+123']]]
        self._assert_expected_messages(expect_messages)

    def test_locking_basic(self):
        self.mocker.messageSignalbot('+123', None, '//enable pingponglocktest',
                                     [])
        self.mocker.messageSignalbot('+123', None, 'ping', [])
        self.mocker.messageSignalbot('+123', None, 'backup', [])
        self.mocker.wait_for_n_messages(n=5)
        self.mocker.messageSignalbot('+123', None, 'ping', [])
        self.mocker.wait_for_n_messages(n=3, timeout=10)
        expect_messages = [[
            'Plugin pingponglocktest enabled. ✔', [], ['+123']
        ], ['start pong', [], ['+123']], ['Acquiring lock...', [], ['+123']],
                           ['pong', [], ['+123']],
                           ['Locked - sleeping 1 sec ...', [], ['+123']],
                           ['... done sleeping / locking', [], ['+123']],
                           ['start pong', [], ['+123']],
                           ['pong', [], ['+123']]]
        self._assert_expected_messages(expect_messages)

    def test_locking_threeblocking(self):
        self.mocker.messageSignalbot('+123', None, '//enable pingponglocktest',
                                     [])
        self.mocker.messageSignalbot('+123', None, 'backup_A', [])
        self.mocker.messageSignalbot('+123', None, 'backup_B', [])
        self.mocker.messageSignalbot('+123', None, 'backup_C', [])
        self.mocker.wait_for_n_messages(n=8, timeout=10)
        expect_messages = [
            ['Plugin pingponglocktest enabled. ✔', [], ['+123']],
            [
                'backup_A: Attempting to acquire exclusive lock...', [],
                ['+123']
            ],
            [
                'backup_B: Attempting to acquire exclusive lock...', [],
                ['+123']
            ],
            ['Isolation lock could not be acquired. ❌', [], ['+123']],
            [
                'backup_C: Attempting to acquire exclusive lock...', [],
                ['+123']
            ],
            [
                'We want to do our own handling if we cannot get the exclusive '
                'lock. ❌', [], ['+123']
            ],
            ['backup_A: Locked - sleeping 1 sec ...', [], ['+123']],
            ['backup_A: ... done sleeping / locking', [], ['+123']],
        ]
        self._assert_expected_messages(expect_messages)
Example #40
def tmp(tmp_path_factory):
    td = TemporaryDirectory(dir=tmp_path_factory.getbasetemp())
    try:
        yield td.name
    finally:
        td.cleanup()
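As written, tmp reads like a pytest yield-fixture built on tmp_path_factory. A sketch of how it would typically be registered and consumed, assuming pytest (no decorator appears in the excerpt itself):

import pytest
from tempfile import TemporaryDirectory


@pytest.fixture
def tmp(tmp_path_factory):
    td = TemporaryDirectory(dir=tmp_path_factory.getbasetemp())
    try:
        yield td.name
    finally:
        td.cleanup()


def test_can_write_into_tmp(tmp):
    # The fixture hands each test a throwaway directory path.
    with open(f"{tmp}/example.txt", "w") as fh:
        fh.write("hello")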
Example #41
class DebugNode(object):
    """ Wraps the bearsd debug node plugin for easier automated testing of the Bears Network"""
    def __init__(self,
                 bearsd,
                 data_dir,
                 args='',
                 plugins=[],
                 apis=[],
                 bearsd_out=None,
                 bearsd_err=None):
        """ Creates a bearsd debug node.

      It can be run by using 'with debug_node:'
      While in the context of 'with' the debug node will continue to run.
      Upon exit of 'with' the debug node will exit and clean up temporary files.
      This class also contains methods to allow basic manipulation of the blockchain.
      For all other requests, the python-bears library should be used.

      args:
         bearsd -- The string path to the location of the bearsd binary
         data_dir -- The string path to an existing bearsd data directory which will be used to pull blocks from.
         args -- Other string args to pass to bearsd.
         plugins -- Any additional plugins to start with the debug node. Modify plugins via DebugNode.plugins
         apis -- Any additional APIs to have available. APIs will retain this order for accessibility starting at id 3.
            database_api is 0, login_api is 1, and debug_node_api is 2. Modify apis via DebugNode.apis
         bearsd_out -- A stream for bearsd's stdout. Default is to pipe to /dev/null
         bearsd_err -- A stream for bearsd's stderr. Default is to pipe to /dev/null
      """
        self._data_dir = None
        self._debug_key = None
        self._FNULL = None
        self._rpc = None
        self._bearsd_bin = None
        self._bearsd_lock = None
        self._bearsd_process = None
        self._temp_data_dir = None

        self._bearsd_bin = Path(bearsd)
        if (not self._bearsd_bin.exists()):
            raise ValueError('bearsd does not exist')
        if (not self._bearsd_bin.is_file()):
            raise ValueError('bearsd is not a file')

        self._data_dir = Path(data_dir)
        if (not self._data_dir.exists()):
            raise ValueError(
                'data_dir either does not exist or is not a properly constructed bears data directory'
            )
        if (not self._data_dir.is_dir()):
            raise ValueError('data_dir is not a directory')

        self.plugins = plugins
        self.apis = apis

        if (args != ''):
            self._args = args.split()
        else:
            self._args = list()

        self._FNULL = open(devnull, 'w')
        if (bearsd_out != None):
            self.bearsd_out = bearsd_out
        else:
            self.bearsd_out = self._FNULL

        if (bearsd_err != None):
            self.bearsd_err = bearsd_err
        else:
            self.bearsd_err = self._FNULL

        self._debug_key = '5JHNbFNDg834SFj8CMArV6YW7td4zrPzXveqTfaShmYVuYNeK69'
        self._bearsd_lock = Lock()

    def __enter__(self):
        self._bearsd_lock.acquire()

        # Setup temp directory to use as the data directory for this
        self._temp_data_dir = TemporaryDirectory()

        for child in self._data_dir.iterdir():
            if (child.is_dir()):
                copytree(str(child),
                         str(self._temp_data_dir.name) + '/' + child.name)

        db_version = self._data_dir / 'db_version'
        if (db_version.exists() and not db_version.is_dir()):
            copy2(str(db_version),
                  str(self._temp_data_dir.name) + '/db_version')

        config = Path(self._temp_data_dir.name) / 'config.ini'
        config.touch()
        config.write_text(self._get_config())

        bearsd = [
            str(self._bearsd_bin),
            '--data-dir=' + str(self._temp_data_dir.name)
        ]
        bearsd.extend(self._args)

        self._bearsd_process = Popen(bearsd,
                                     stdout=self.bearsd_out,
                                     stderr=self.bearsd_err)
        self._bearsd_process.poll()
        sleep(5)
        if (not self._bearsd_process.returncode):
            self._rpc = BearsNodeRPC('ws://127.0.0.1:8095', '', '')
        else:
            raise Exception("bearsd did not start properly...")

    def __exit__(self, exc, value, tb):
        self._rpc = None

        if (self._bearsd_process != None):
            self._bearsd_process.poll()

            if (not self._bearsd_process.returncode):
                self._bearsd_process.send_signal(SIGINT)

                sleep(7)
                self._bearsd_process.poll()

                if (not self._bearsd_process.returncode):
                    self._bearsd_process.send_signal(SIGTERM)

                    sleep(5)
                    self._bearsd_process.poll()

                    if (self._bearsd_process.returncode):
                        logging.error(
                            'bearsd did not properly shut down after SIGINT and SIGTERM. User intervention may be required.'
                        )

        self._bearsd_process = None
        self._temp_data_dir.cleanup()
        self._temp_data_dir = None
        self._bearsd_lock.release()

    def _get_config(self):
        return "# no seed-node in config file or command line\n" \
            + "p2p-endpoint = 127.0.0.1:3331       # bind to localhost to prevent remote p2p nodes from connecting to us\n" \
            + "rpc-endpoint = 127.0.0.1:8095       # bind to localhost to secure RPC API access\n" \
            + "enable-plugin = witness debug_node " + " ".join( self.plugins ) + "\n" \
            + "public-api = database_api login_api debug_node_api " + " ".join( self.apis ) + "\n"

    def debug_generate_blocks(self, count):
        """
      Generate blocks on the current chain. Pending transactions will be applied, otherwise the
      blocks will be empty.

      The debug node plugin requires a WIF key to sign blocks with. This class uses the key
      5JHNbFNDg834SFj8CMArV6YW7td4zrPzXveqTfaShmYVuYNeK69 which was generated from
      `get_dev_key bears debug`. Do not use this key on the live chain for any reason.

      args:
         count -- The number of new blocks to generate.

      returns:
         int: The number of blocks actually pushed.
      """
        if (count <= 0):
            raise ValueError("count must be a positive non-zero number")
        return self._rpc.rpcexec(
            json.loads(
                '{"jsonrpc": "2.0", "method": "call", "params": [2,"debug_generate_blocks",["'
                + self._debug_key + '",' + str(count) + ']], "id": 1}'))

    def debug_generate_blocks_until(self, timestamp, generate_sparsely=True):
        """
      Generate block up until a head block time rather than a specific number of blocks. As with
      `debug_generate_blocks` all blocks will be empty unless there were pending transactions.

      The debug node plugin requires a WIF key to sign blocks with. This class uses the key
      5JHNbFNDg834SFj8CMArV6YW7td4zrPzXveqTfaShmYVuYNeK69 which was generated from
      `get_dev_key bears debug`. Do not use this key on the live chain for any reason.

      args:
         timestamp -- The desired new head block time. This is a POSIX timestamp.
         generate_sparsely -- True if you wish to skip all intermediate blocks between the current
            head block time and the desired head block time. This is useful to trigger events, such
            as payouts and bandwidth updates, without generating blocks. However, many automatic chain
            updates (such as block inflation) will not continue at their normal rate as they are only
            calculated when a block is produced.

      returns:
         (time, int): A tuple including the new head block time and the number of blocks that were
            generated.
      """
        if (not isinstance(timestamp, int)):
            raise ValueError("Time must be a int")
        generate_sparsely_str = "true"
        if (not generate_sparsely):
            generate_sparsely_str = "false"

        # Convert the POSIX timestamp to an ISO 8601 string without the UTC offset.
        iso_string = datetime.fromtimestamp(
            timestamp, timezone.utc).isoformat().split('+')[0].split('-')
        if (len(iso_string) == 4):
            iso_string = iso_string[:-1]
        iso_string = '-'.join(iso_string)

        print(iso_string)
        return self._rpc.rpcexec(
            json.loads(
                '{"jsonrpc": "2.0", "method": "call", "params": [2,"debug_generate_blocks_until",["'
                + self._debug_key + '","' + iso_string + '","' +
                generate_sparsely_str + '"]], "id": 1}'))

    def debug_set_hardfork(self, hardfork_id):
        """
      Schedules a hardfork to happen on the next block. Call `debug_generate_blocks( 1 )` to trigger
      the hardfork. All hardforks with id less than or equal to hardfork_id will be scheduled and
      triggered.

      args:
         hardfork_id: The id of the hardfork to set. Hardfork IDs start at 1 (0 is genesis) and increment
            by one for each hardfork. The maximum value is BEARS_NUM_HARDFORKS in chain/hardfork.d/0-preamble.hf
      """
        if (hardfork_id < 0):
            raise ValueError("hardfork_id cannot be negative")

        self._rpc.rpcexec(
            json.loads(
                '{"jsonrpc": "2.0", "method": "call", "params": [2,"debug_set_hardfork",['
                + str(hardfork_id) + ']], "id":1}'))

    def debug_has_hardfork(self, hardfork_id):
        return self._rpc.rpcexec(
            json.loads(
                '{"jsonrpc": "2.0", "method": "call", "params": [2,"debug_has_hardfork",['
                + str(hardfork_id) + ']], "id":1}'))

    def debug_get_witness_schedule(self):
        return self._rpc.rpcexec(
            json.loads(
                '{"jsonrpc": "2.0", "method": "call", "params": [2,"debug_get_witness_schedule",[]], "id":1}'
            ))

    def debug_get_hardfork_property_object(self):
        return self._rpc.rpcexec(
            json.loads(
                '{"jsonrpc": "2.0", "method": "call", "params": [2,"debug_get_hardfork_property_object",[]], "id":1}'
            ))
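The wrapper above is meant to be used as a context manager: entering it copies the chain data into a temporary directory, writes a config.ini, launches bearsd, and exposes the debug RPC calls; exiting shuts the node down with SIGINT/SIGTERM and removes the temporary copy. A minimal usage sketch follows; `DebugNode` and its constructor arguments are placeholders, since the actual class name and __init__ signature are defined above this excerpt.

from pathlib import Path

# Hypothetical class name and constructor; only the __enter__/__exit__ and
# debug_* behaviour shown above is relied upon here.
node = DebugNode(bearsd_bin=Path('/usr/local/bin/bearsd'),
                 data_dir=Path('~/.bearsd').expanduser())

with node:
    # Generate ten empty blocks signed with the built-in debug WIF key.
    node.debug_generate_blocks(10)

    # Schedule hardfork 5 and apply it on the next generated block.
    node.debug_set_hardfork(5)
    node.debug_generate_blocks(1)
    print(node.debug_has_hardfork(5))
# Leaving the block stops bearsd and cleans up the temporary data directory.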
Example #42
0
File: figure.py Project: wbm06/pygmt
class Figure(BasePlotting):
    """
    A GMT figure to handle all plotting.

    Use the plotting methods of this class to add elements to the figure.  You
    can preview the figure using :meth:`pygmt.Figure.show` and save the figure
    to a file using :meth:`pygmt.Figure.savefig`.

    Unlike traditional GMT figures, no figure file is generated until you call
    :meth:`pygmt.Figure.savefig` or :meth:`pygmt.Figure.psconvert`.

    Examples
    --------

    >>> fig = Figure()
    >>> fig.basemap(region=[0, 360, -90, 90], projection='W7i', frame=True)
    >>> fig.savefig("my-figure.png")
    >>> # Make sure the figure file is generated and clean it up
    >>> import os
    >>> os.path.exists('my-figure.png')
    True
    >>> os.remove('my-figure.png')

    The plot region can be specified through ISO country codes (for example,
    ``'JP'`` for Japan):

    >>> fig = Figure()
    >>> fig.basemap(region='JP', projection="M3i", frame=True)
    >>> # The fig.region attribute shows the WESN bounding box for the figure
    >>> print(', '.join('{:.2f}'.format(i)  for i in fig.region))
    122.94, 145.82, 20.53, 45.52

    """
    def __init__(self):
        self._name = unique_name()
        self._preview_dir = TemporaryDirectory(prefix=self._name + "-preview-")
        self._activate_figure()

    def __del__(self):
        # Clean up the temporary directory that stores the previews
        if hasattr(self, "_preview_dir"):
            self._preview_dir.cleanup()

    def _activate_figure(self):
        """
        Start and/or activate the current figure.

        All plotting commands run afterward will append to this figure.

        Unlike the command-line version (``gmt figure``), this method does not
        trigger the generation of a figure file. An explicit call to
        :meth:`pygmt.Figure.savefig` or :meth:`pygmt.Figure.psconvert` must be
        made in order to get a file.
        """
        # Passing format '-' tells pygmt.end to not produce any files.
        fmt = "-"
        with Session() as lib:
            lib.call_module("figure", "{} {}".format(self._name, fmt))

    def _preprocess(self, **kwargs):
        """
        Call the ``figure`` module before each plotting command to ensure we're
        plotting to this particular figure.
        """
        self._activate_figure()
        return kwargs

    @property
    def region(self):
        "The geographic WESN bounding box for the current figure."
        self._activate_figure()
        with Session() as lib:
            wesn = lib.extract_region()
        return wesn

    @fmt_docstring
    @use_alias(
        A="crop",
        C="gs_option",
        E="dpi",
        F="prefix",
        I="icc_gray",
        T="fmt",
        Q="anti_aliasing",
    )
    @kwargs_to_strings()
    def psconvert(self, **kwargs):
        """
        Convert [E]PS file(s) to other formats.

        Converts one or more PostScript files to other formats (BMP, EPS, JPEG,
        PDF, PNG, PPM, SVG, TIFF) using GhostScript.

        If no input files are given, will convert the current active figure
        (see :func:`pygmt.figure`). In this case, an output name must be given
        using parameter *prefix*.

        Full option list at :gmt-docs:`psconvert.html`

        {aliases}

        Parameters
        ----------
        crop : str or bool
            Adjust the BoundingBox and HiResBoundingBox to the minimum required
            by the image content. Append ``u`` to first remove any GMT-produced
            time-stamps. Default is True.
        gs_option : str
            Specify a single, custom option that will be passed on to
            GhostScript as is.
        dpi : int
            Set raster resolution in dpi. Default = 720 for PDF, 300 for
            others.
        prefix : str
            Force the output file name. By default output names are constructed
            using the input names as base, which are appended with an
            appropriate extension. Use this option to provide a different name,
            but without extension. Extension is still determined automatically.
        icc_gray : bool
            Enforce gray-shades by using ICC profiles.
        anti_aliasing : str
            Set the anti-aliasing options for graphics or text. Append the size
            of the subsample box (1, 2, or 4) [4]. Default is no anti-aliasing
            (same as bits = 1).
        fmt : str
            Sets the output format, where *b* means BMP, *e* means EPS, *E*
            means EPS with PageSize command, *f* means PDF, *F* means
            multi-page PDF, *j* means JPEG, *g* means PNG, *G* means
            transparent PNG (untouched regions are transparent), *m* means PPM,
            *s* means SVG, and *t* means TIFF [default is JPEG]. To ``'bjgt'``
            you can append ``'+m'`` in order to get a monochrome (grayscale)
            image. The EPS format can be combined with any of the other
            formats. For example, ``'ef'`` creates both an EPS and a PDF file.
            Using ``'F'`` creates a multi-page PDF file from the list of input
            PS or PDF files. It requires the *prefix* option.

        """
        kwargs = self._preprocess(**kwargs)
        # Default cropping the figure to True
        if "A" not in kwargs:
            kwargs["A"] = ""
        with Session() as lib:
            lib.call_module("psconvert", build_arg_string(kwargs))

    def savefig(self,
                fname,
                transparent=False,
                crop=True,
                anti_alias=True,
                show=False,
                **kwargs):
        """
        Save the figure to a file.

        This method implements a matplotlib-like interface for
        :meth:`~gmt.Figure.psconvert`.

        Supported formats: PNG (``.png``), JPEG (``.jpg``), PDF (``.pdf``),
        BMP (``.bmp``), TIFF (``.tif``), EPS (``.eps``), and KML (``.kml``).
        The KML output generates a companion PNG file.

        You can pass in any keyword arguments that
        :meth:`~gmt.Figure.psconvert` accepts.

        Parameters
        ----------
        fname : str
            The desired figure file name, including the extension. See the list
            of supported formats and their extensions above.
        transparent : bool
            If True, will use a transparent background for the figure. Only
            valid for PNG format.
        crop : bool
            If True, will crop the figure canvas (page) to the plot area.
        anti_alias : bool
            If True, will use anti-aliasing when creating raster images (PNG,
            JPG, TIF). More specifically, uses options ``Qt=2, Qg=2`` in
            :meth:`~gmt.Figure.psconvert`. Ignored if creating vector graphics.
            Overrides values of ``Qt`` and ``Qg`` passed in through ``kwargs``.
        show : bool
            If True, will open the figure in an external viewer.
        dpi : int
            Set raster resolution in dpi. Default is 720 for PDF, 300 for
            others.

        """
        # All supported formats
        fmts = dict(png="g",
                    pdf="f",
                    jpg="j",
                    bmp="b",
                    eps="e",
                    tif="t",
                    kml="g")

        prefix, ext = os.path.splitext(fname)
        ext = ext[1:]  # Remove the .
        if ext not in fmts:
            raise GMTInvalidInput("Unknown extension '.{}'".format(ext))
        fmt = fmts[ext]
        if transparent:
            if fmt != "g":
                raise GMTInvalidInput(
                    "Transparency unavailable for '{}', only for png.".format(
                        ext))
            fmt = fmt.upper()
        if anti_alias:
            kwargs["Qt"] = 2
            kwargs["Qg"] = 2
        if ext == "kml":
            kwargs["W"] = "+k"

        self.psconvert(prefix=prefix, fmt=fmt, crop=crop, **kwargs)
        if show:
            launch_external_viewer(fname)

    def show(self, dpi=300, width=500, method="static"):
        """
        Display a preview of the figure.

        Inserts the preview in the Jupyter notebook output. You will need to
        have IPython installed for this to work. You should have it if you are
        using the notebook.

        If ``method='external'``, makes PDF preview instead and opens it in the
        default viewer for your operating system (falls back to the default web
        browser). Note that the external viewer does not block the current
        process, so this won't work in a script.

        Parameters
        ----------
        dpi : int
            The image resolution (dots per inch).
        width : int
            Width of the figure shown in the notebook in pixels. Ignored if
            ``method='external'``.
        method : str
            How the figure will be displayed. Options are (1) ``'static'``: PNG
            preview (default); (2) ``'external'``: PDF preview in an external
            program.

        Returns
        -------
        img : IPython.display.Image
            Only if ``method != 'external'``.

        """
        # Module level variable to know which figures had their show method
        # called. Needed for the sphinx-gallery scraper.
        SHOWED_FIGURES.append(self)

        if method not in ["static", "external"]:
            raise GMTInvalidInput("Invalid show method '{}'.".format(method))
        if method == "external":
            pdf = self._preview(fmt="pdf",
                                dpi=dpi,
                                anti_alias=False,
                                as_bytes=False)
            launch_external_viewer(pdf)
            img = None
        elif method == "static":
            png = self._preview(fmt="png",
                                dpi=dpi,
                                anti_alias=True,
                                as_bytes=True,
                                transparent=True)
            if Image is None:
                raise GMTError(" ".join([
                    "Cannot find IPython.",
                    "Make sure you have it installed",
                    "or use 'external=True' to open in an external viewer.",
                ]))
            img = Image(data=png, width=width)
        return img

    def shift_origin(self, xshift=None, yshift=None):
        """
        Shift plot origin in x and/or y directions.

        This method shifts the plot origin relative to the current origin by
        (*xshift*, *yshift*). A length unit (**c**, **i**, or **p**) can
        optionally be appended to the shift values.

        Prepend **a** to shift the origin back to the original position after
        plotting, prepend **c** to center the plot on the center of the paper
        (optionally add shift), prepend **f** to shift the origin relative to
        the fixed lower left corner of the page, or prepend **r** [Default] to
        move the origin relative to its current location.

        Detailed usage at
        :gmt-docs:`cookbook/options.html#plot-positioning-and-layout-the-x-y-options`

        Parameters
        ----------
        xshift : str
            Shift plot origin in x direction.
        yshift : str
            Shift plot origin in y direction.
        """
        self._preprocess()
        args = ["-T"]
        if xshift:
            args.append("-X{}".format(xshift))
        if yshift:
            args.append("-Y{}".format(yshift))

        with Session() as lib:
            lib.call_module("plot", " ".join(args))

    def _preview(self, fmt, dpi, as_bytes=False, **kwargs):
        """
        Grab a preview of the figure.

        Parameters
        ----------
        fmt : str
            The image format. Can be any extension that
            :meth:`~gmt.Figure.savefig` recognizes.
        dpi : int
            The image resolution (dots per inch).
        as_bytes : bool
            If ``True``, will load the image as a bytes string and return that
            instead of the file name.

        Returns
        -------
        preview : str or bytes
            If ``as_bytes=False``, this is the file name of the preview image
            file. Else, it is the file content loaded as a bytes string.

        """
        fname = os.path.join(self._preview_dir.name,
                             "{}.{}".format(self._name, fmt))
        self.savefig(fname, dpi=dpi, **kwargs)
        if as_bytes:
            with open(fname, "rb") as image:
                preview = image.read()
            return preview
        return fname

    def _repr_png_(self):
        """
        Show a PNG preview if the object is returned in an interactive shell.
        For the Jupyter notebook or IPython Qt console.
        """
        png = self._preview(fmt="png", dpi=70, anti_alias=True, as_bytes=True)
        return png

    def _repr_html_(self):
        """
        Show the PNG image embedded in HTML with a controlled width.
        Looks better than the raw PNG.
        """
        raw_png = self._preview(fmt="png",
                                dpi=300,
                                anti_alias=True,
                                as_bytes=True)
        base64_png = base64.encodebytes(raw_png)
        html = '<img src="data:image/png;base64,{image}" width="{width}px">'
        return html.format(image=base64_png.decode("utf-8"), width=500)
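Putting the Figure methods above together, a typical save-and-preview round trip looks like the sketch below (assuming the class is exposed as pygmt.Figure and a working GMT installation is available):

import pygmt  # assumes the Figure class above is exported as pygmt.Figure

fig = pygmt.Figure()
fig.basemap(region=[0, 10, 0, 10], projection="X10c", frame=True)

# savefig() infers the psconvert format from the extension; transparent=True
# is only valid for PNG output, and dpi is forwarded to psconvert.
fig.savefig("sketch.png", transparent=True, dpi=300)

# show() embeds a PNG preview in a notebook, or opens a PDF preview externally.
fig.show(method="external")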
Example #43
0
class SubjectData:
    layout: BIDSLayout
    subject: str
    session: str
    output_dir: Path
    check_duration: bool = True

    tmp_bids_dir: Path = field(init=False)
    in_data: dict = field(init=False)
    preprocessing_dir: Path = field(init=False)
    preprocessing_crashed_dir: Path = field(init=False)
    fmriprep_logfile_stub: Path = field(init=False)
    fmriprep_dir: Path = field(init=False)
    fmriprep_crashed_dir: Path = field(init=False)
    feature_dir: Path = field(init=False)
    preprocessed_files: dict = field(init=False)
    features_files: dict = field(init=False)

    def __post_init__(self):
        self.tmp_bids_dir = Path(self.layout.root)
        self.in_data = get_subject_in_data(self.layout, self.subject,
                                           self.session, self.check_duration)

        self.preprocessing_dir = Path(self.output_dir) / "preprocessing"
        self.preprocessing_crashed_dir = Path(
            self.output_dir) / "preprocessing_crashed"
        self.fmriprep_logfile_stub = self.preprocessing_dir / f"logs/sub-{self.subject}/sub-{self.subject}_fmriprep"
        self.fmriprep_dir = self.preprocessing_dir / f"sub-{self.subject}"
        self.fmriprep_crashed_dir = self.preprocessing_crashed_dir / f"sub-{self.subject}"
        self.feature_dir = Path(
            self.output_dir
        ) / f"features/sub-{self.subject}/ses-{self.session}"
        self.get_expected_files()

    def __str__(self):
        d = self.__reduce__()[2]
        del d["layout"]
        return pprint.pformat(d, indent=4)

    def get_expected_files(self):
        # files that are expected after preprocessing
        # preprocessed_files = {"modality1":
        #                           {"run1":
        #                                {"file1": "filename1",
        #                                 "file2": "filename2",
        #                                 }
        #                            }
        #                       }
        self.preprocessed_files = {"fs": {"runx": {}}, "fmri": {}}
        self.preprocessed_files["fs"]["runx"] = {
            "done": (self.fmriprep_dir /
                     f"freesurfer/sub-{self.subject}/scripts/recon-all.done")
        }

        for run, func_info in self.in_data["func_runs"].items():
            # preprocessed functional files
            f = func_info["fmri_file"]
            out_filename = f.name[:f.name.find(
                "_bold.nii"
            )] + "_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz"
            out_filepath_fmri = (
                self.fmriprep_dir /
                f"fmriprep/sub-{self.subject}/ses-{self.session}/func/" /
                out_filename)

            # mask file
            out_filename = f.name[:f.name.find(
                "_bold.nii"
            )] + "_space-MNI152NLin2009cAsym_desc-brain_mask.nii.gz"
            out_filepath_mask = (
                self.fmriprep_dir /
                f"fmriprep/sub-{self.subject}/ses-{self.session}/func/" /
                out_filename)

            # confounds file
            out_filename = f.name[:f.name.find(
                "_bold.nii")] + "_desc-confounds_regressors.tsv"
            out_filepath_confounds = (
                self.fmriprep_dir /
                f"fmriprep/sub-{self.subject}/ses-{self.session}/func/" /
                out_filename)

            self.preprocessed_files["fmri"][run] = {
                "fmri_file": out_filepath_fmri,
                "mask_file": out_filepath_mask,
                "confounds_file": out_filepath_confounds
            }

        # files that are expected after feature extraction
        self.features_files = {
            "fs": [
                self.feature_dir /
                f"sub-{self.subject}_ses-{self.session}_desc-fsFeatures.pkl"
            ],
            "fmri":
            [(self.feature_dir /
              f"sub-{self.subject}_ses-{self.session}_desc-fmriFeatures.pkl"),
             (self.feature_dir /
              f"sub-{self.subject}_ses-{self.session}_desc-fmriFullFeatures.pkl"
              )]
        }

    def create_fmriprep_tmp_outdir(self):
        """
        creates tmpdir and links specified session into tempdir (since fmriprep does not allow
        to select sessions)
        """
        # create local output folder
        self.tmp_output_obj = TemporaryDirectory(
        )  # self.tmp_output_obj.cleanup for tear down
        self.tmp_output_dir = Path(self.tmp_output_obj.name)
        self.tmp_fmriprep_dir = self.tmp_output_dir / "fmriprep"

    def check_preprocessing(self, raise_on_missing=True):
        preprocessed_files = []
        for mod in self.preprocessed_files.keys():
            for run in self.preprocessed_files[mod].keys():
                for file in self.preprocessed_files[mod][run].keys():
                    preprocessed_files.append(
                        self.preprocessed_files[mod][run][file])

        found = []
        missing = []
        for f in preprocessed_files:
            if f.is_file():
                found.append(f)
            else:
                missing.append(f)

        if raise_on_missing and missing:
            msg = f"Expected {len(preprocessed_files)} files, but only {len(found)} found with " \
                f"{len(missing)} missing. {self.subject}, {self.session}.\n" \
                f"Found: {found}\n" \
                f"Missing: {missing}."
            raise RuntimeError(msg)
        return found, missing

    def compile_fmriprep_cmd(self, fs_license_file, test_run, n_cpus):
        sloppy = "--sloppy" if test_run else ""
        self.fmriprep_cmd = (f"conda run -n fmriprep_env "
                             f"fmriprep "
                             f"{str(self.tmp_bids_dir)} "
                             f"{str(self.tmp_fmriprep_dir)} "
                             f"participant "
                             f"--resource-monitor "
                             f"--fs-license-file "
                             f"{fs_license_file} "
                             f"--n_cpus {n_cpus} "
                             f"--skip_bids_validation --notrack {sloppy}")

    def run_fmriprep(self, fs_license_file, test_run, n_cpus):
        """
        fmriprep and dependencies are in the fmriprep_env conda env
        raw data of the relevant sessions is linked into a tempfolder and the fmriprep output is saved locally
        during execution (see https://github.com/poldracklab/smriprep/issues/44) and copied to the mounted output
        folder after processing
        """
        self.create_fmriprep_tmp_outdir()
        logger.debug(f"Create tmp fmriprep output dir {self.tmp_output_dir}")

        self.compile_fmriprep_cmd(fs_license_file, test_run, n_cpus)

        try:
            run_cmd(self.fmriprep_cmd,
                    output_to_file=self.fmriprep_logfile_stub)
        except Exception:
            logger.warning("Fmriprep 1st run failed. Try again.")
            try:
                run_cmd(self.fmriprep_cmd,
                        output_to_file=self.fmriprep_logfile_stub)
            except Exception:
                logger.warning(
                    "Fmriprep 2nd run failed. Try again with 1 cpu")
                self.compile_fmriprep_cmd(fs_license_file, test_run, n_cpus=1)
                try:
                    run_cmd(self.fmriprep_cmd,
                            output_to_file=self.fmriprep_logfile_stub)
                    logger.warning("Fmriprep 3rd run successful")
                except Exception:
                    logger.warning("Fmriprep 3rd run failed as well")
                    self.save_fmriprep_outputs(failed=True)
                    raise Exception("Fmriprep failed.")

        logger.debug(f"Saving fmriprep output")
        self.save_fmriprep_outputs(failed=False)
        logger.info("Fmriprep done")

        _, _ = self.check_preprocessing()

    def save_fmriprep_outputs(self, failed=False):
        output_dir = self.fmriprep_crashed_dir if failed else self.fmriprep_dir
        output_dir.parent.mkdir(parents=True, exist_ok=True)

        fsav = (self.tmp_fmriprep_dir / "freesurfer").glob("fsaverage*")
        for f in fsav:
            shutil.rmtree(f)

        # shutil.copytree gives permission error
        logger.info(f"Copy {self.tmp_fmriprep_dir} to {output_dir}")
        run_cmd(f"cp -r {self.tmp_fmriprep_dir} {output_dir}{os.sep}")
        logger.info(f"Copy done")

    def teardown_tmp_dirs(self):
        self.tmp_output_obj.cleanup()

    def check_features(self, raise_on_missing=True):
        found = []
        missing = []
        for mod in self.features_files.keys():
            for f in self.features_files[mod]:
                if f.is_file():
                    found.append(f)
                else:
                    missing.append(f)

        if raise_on_missing and missing:
            msg = f"Expected {self.features_files}, but only {found} found with " \
                f"{len(missing)} missing. {self.subject}, {self.session}.\n" \
                f"Found: {found}\n" \
                f"Missing: {missing}."
            raise RuntimeError(msg)
        return found, missing
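SubjectData ties a pybids layout to the fmriprep run-and-check workflow defined above. A hedged driver sketch follows; the dataset paths, the FreeSurfer license location, and the pybids import are assumptions, and module-level helpers such as get_subject_in_data and run_cmd are defined elsewhere in the file.

from pathlib import Path
from bids import BIDSLayout  # pybids

layout = BIDSLayout("/data/bids_dataset")           # placeholder dataset path
subject_data = SubjectData(layout=layout,
                           subject="01",
                           session="01",
                           output_dir=Path("/data/derivatives"))

# Run fmriprep with the built-in retry/fallback logic; run_fmriprep already
# raises if any expected preprocessed file is missing afterwards.
subject_data.run_fmriprep(fs_license_file="/opt/freesurfer/license.txt",
                          test_run=False,
                          n_cpus=8)
subject_data.teardown_tmp_dirs()

# After a separate feature-extraction step, the expected outputs can be verified:
found, missing = subject_data.check_features(raise_on_missing=False)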
Example #44
0
class ParseTests(TestCase, PretrainedWeightsBase):
    @classmethod
    def setUpClass(cls):
        cls.download_pre_trained_weights(cls)

    @pytest.fixture(autouse=True)
    def inject_fixtures(self, caplog):
        self._caplog = caplog

    def setUp(self) -> None:
        self.temp_dir_obj = TemporaryDirectory()

        self.fake_data_path_pickle = os.path.join(self.temp_dir_obj.name,
                                                  "fake_data.p")

        self.fake_data_path_csv = os.path.join(self.temp_dir_obj.name,
                                               "fake_data.csv")
        self.a_unsupported_data_path = os.path.join(self.temp_dir_obj.name,
                                                    "fake_data.txt")
        self.fake_data_path_json = os.path.join(self.temp_dir_obj.name,
                                                "fake_data.json")

        self.pickle_p_export_filename = "a_file.p"
        self.pickle_pickle_export_filename = "a_file.pickle"
        self.csv_export_filename = "a_file.csv"
        self.json_export_filename = "a_file.json"

        self.a_fasttext_model_type = "fasttext"
        self.a_fasttext_att_model_type = "fasttext-attention"
        self.a_fasttext_light_model_type = "fasttext-light"
        self.a_bpemb_model_type = "bpemb"
        self.a_bpemb_att_model_type = "bpemb-attention"

        self.cpu_device = "cpu"
        self.gpu_device = "0"

        self.create_parser()

    def tearDown(self) -> None:
        self.temp_dir_obj.cleanup()

    def create_parser(self):
        self.parser = argparse.ArgumentParser()
        self.parser.add_argument(
            "parsing_model",
            choices=[
                self.a_fasttext_model_type,
                self.a_fasttext_att_model_type,
                self.a_fasttext_light_model_type,
                self.a_bpemb_model_type,
                self.a_bpemb_att_model_type,
            ],
        )

        self.parser.add_argument("dataset_path", type=str)

        self.parser.add_argument("export_filename", type=str)

        self.parser.add_argument("--device", type=str, default="0")

        self.parser.add_argument("--path_to_retrained_model",
                                 type=str,
                                 default=None)

        self.parser.add_argument("--csv_column_name", type=str, default=None)

        self.parser.add_argument("--csv_column_separator",
                                 type=str,
                                 default="\t")

        self.parser.add_argument("--log", type=bool_parse, default="True")

    @skipIf(
        not os.path.exists(
            os.path.join(os.path.expanduser("~"), ".cache", "deepparse",
                         "cc.fr.300.bin")),
        "download of model too long for test in runner",
    )
    def test_integration_cpu(self):
        create_pickle_file(self.fake_data_path_pickle, predict_container=True)

        parse.main([
            self.a_fasttext_model_type,
            self.fake_data_path_pickle,
            self.pickle_p_export_filename,
            "--device",
            self.cpu_device,
        ])

        export_path = generate_export_path(self.fake_data_path_pickle,
                                           self.pickle_p_export_filename)
        self.assertTrue(os.path.isfile(export_path))

    @skipIf(not torch.cuda.is_available(), "no gpu available")
    def test_integration_gpu(self):
        create_pickle_file(self.fake_data_path_pickle, predict_container=True)

        parse.main([
            self.a_fasttext_model_type,
            self.fake_data_path_pickle,
            self.pickle_p_export_filename,
            "--device",
            self.gpu_device,
        ])

        export_path = generate_export_path(self.fake_data_path_pickle,
                                           self.pickle_p_export_filename)
        self.assertTrue(os.path.isfile(export_path))

    @skipIf(
        not os.path.exists(
            os.path.join(os.path.expanduser("~"), ".cache", "deepparse",
                         "cc.fr.300.bin")),
        "download of model too long for test in runner",
    )
    def test_integration_logging(self):
        with self._caplog.at_level(logging.INFO):
            create_pickle_file(self.fake_data_path_pickle,
                               predict_container=True)
            parse.main([
                self.a_fasttext_model_type,
                self.fake_data_path_pickle,
                self.pickle_p_export_filename,
                "--device",
                self.cpu_device,
            ])
        expected_first_message = (
            f"Parsing dataset file {self.fake_data_path_pickle} using the parser "
            f"FastTextAddressParser")
        actual_first_message = self._caplog.records[0].message
        self.assertEqual(expected_first_message, actual_first_message)

        export_path = generate_export_path(self.fake_data_path_pickle,
                                           "a_file.p")
        expected_second_message = (
            f"4 addresses have been parsed.\n"
            f"The parsed addresses are outputted here: {export_path}")
        actual_second_message = self._caplog.records[1].message
        self.assertEqual(expected_second_message, actual_second_message)

    @skipIf(
        not os.path.exists(
            os.path.join(os.path.expanduser("~"), ".cache", "deepparse",
                         "cc.fr.300.bin")),
        "download of model too long for test in runner",
    )
    def test_integration_no_logging(self):
        with self._caplog.at_level(logging.INFO):
            create_pickle_file(self.fake_data_path_pickle,
                               predict_container=True)
            parse.main([
                self.a_fasttext_model_type,
                self.fake_data_path_pickle,
                self.pickle_p_export_filename,
                "--device",
                self.cpu_device,
                "--log",
                "False",
            ])
        self.assertEqual(0, len(self._caplog.records))

    @skipIf(not torch.cuda.is_available(), "no gpu available")
    def test_integration_attention_model(self):
        create_pickle_file(self.fake_data_path_pickle, predict_container=True)

        parse.main([
            self.a_fasttext_att_model_type,
            self.fake_data_path_pickle,
            self.pickle_p_export_filename,
            "--device",
            self.cpu_device,
        ])

        export_path = generate_export_path(self.fake_data_path_pickle,
                                           self.pickle_p_export_filename)
        self.assertTrue(os.path.isfile(export_path))

    @skipIf(
        not os.path.exists(
            os.path.join(os.path.expanduser("~"), ".cache", "deepparse",
                         "cc.fr.300.bin")),
        "download of model too long for test in runner",
    )
    def test_integration_json(self):
        create_pickle_file(self.fake_data_path_pickle, predict_container=True)

        parse.main([
            self.a_fasttext_att_model_type,
            self.fake_data_path_pickle,
            self.json_export_filename,
            "--device",
            self.cpu_device,
        ])

        export_path = generate_export_path(self.fake_data_path_pickle,
                                           self.json_export_filename)
        self.assertTrue(os.path.isfile(export_path))

    @skipIf(
        not os.path.exists(
            os.path.join(os.path.expanduser("~"), ".cache", "deepparse",
                         "cc.fr.300.bin")),
        "download of model too long for test in runner",
    )
    def test_integration_csv(self):
        create_csv_file(self.fake_data_path_csv, predict_container=True)

        parse.main([
            self.a_fasttext_att_model_type,
            self.fake_data_path_csv,
            self.csv_export_filename,
            "--device",
            self.cpu_device,
            "--csv_column_name",
            "Address",
        ])

        export_path = generate_export_path(self.fake_data_path_csv,
                                           self.csv_export_filename)
        self.assertTrue(os.path.isfile(export_path))

    @skipIf(
        not os.path.exists(
            os.path.join(os.path.expanduser("~"), ".cache", "deepparse",
                         "cc.fr.300.bin")),
        "download of model too long for test in runner",
    )
    def test_integration_csv_separator(self):
        sep = ";"
        create_csv_file(self.fake_data_path_csv,
                        predict_container=True,
                        separator=sep)

        parse.main([
            self.a_fasttext_model_type,
            self.fake_data_path_csv,
            self.csv_export_filename,
            "--device",
            self.cpu_device,
            "--csv_column_name",
            "Address",
            "--csv_column_separator",
            sep,
        ])

        export_path = generate_export_path(self.fake_data_path_pickle,
                                           self.csv_export_filename)
        self.assertTrue(os.path.isfile(export_path))

    @skipIf(
        not os.path.exists(
            os.path.join(os.path.expanduser("~"), ".cache", "deepparse",
                         "cc.fr.300.bin")),
        "download of model too long for test in runner",
    )
    def test_ifIsCSVFile_noColumnName_raiseValueError(self):
        create_csv_file(self.fake_data_path_csv, predict_container=True)

        with self.assertRaises(ValueError):
            parse.main([
                self.a_fasttext_model_type,
                self.fake_data_path_csv,
                self.csv_export_filename,
                "--device",
                self.cpu_device,
            ])

    @skipIf(
        not os.path.exists(
            os.path.join(os.path.expanduser("~"), ".cache", "deepparse",
                         "cc.fr.300.bin")),
        "download of model too long for test in runner",
    )
    def test_ifIsNotSupportedFile_raiseValueError(self):
        create_csv_file(self.fake_data_path_csv, predict_container=True)

        with self.assertRaises(ValueError):
            parse.main([
                self.a_fasttext_model_type,
                self.a_unsupported_data_path,
                self.csv_export_filename,
                "--device",
                self.cpu_device,
            ])

    @skipIf(
        not os.path.exists(
            os.path.join(os.path.expanduser("~"), ".cache", "deepparse",
                         "cc.fr.300.bin")),
        "download of model too long for test in runner",
    )
    def test_ifIsNotSupportedExportFile_raiseValueError(self):
        create_csv_file(self.fake_data_path_csv, predict_container=True)

        with self.assertRaises(ValueError):
            parse.main([
                self.a_fasttext_model_type,
                self.fake_data_path_csv,
                self.a_unsupported_data_path,
                "--device",
                self.cpu_device,
            ])

    @skipIf(
        not os.path.exists(
            os.path.join(os.path.expanduser("~"), ".cache", "deepparse",
                         "cc.fr.300.bin")),
        "download of model too long for test in runner",
    )
    def test_ifPathToFakeRetrainModel_thenUseFakeRetrainModel(self):
        with self._caplog.at_level(logging.INFO):
            # We use the default path to fasttext model as a "retrain model path"
            path_to_retrained_model = os.path.join(os.path.expanduser("~"),
                                                   ".cache", "deepparse",
                                                   "fasttext.ckpt")
            create_pickle_file(self.fake_data_path_pickle,
                               predict_container=True)

            parse.main([
                self.a_fasttext_model_type,
                self.fake_data_path_pickle,
                self.pickle_p_export_filename,
                "--device",
                self.cpu_device,
                "--path_to_retrained_model",
                path_to_retrained_model,
            ])

        expected_first_message = (
            f"Parsing dataset file {self.fake_data_path_pickle} using the parser "
            f"FastTextAddressParser")
        actual_first_message = self._caplog.records[0].message
        self.assertEqual(expected_first_message, actual_first_message)

    @skipIf(
        not os.path.exists(
            os.path.join(os.path.expanduser("~"), ".cache", "deepparse",
                         "cc.fr.300.bin")),
        "download of model too long for test in runner",
    )
    def test_ifPathToFastTextRetrainModel_thenUseFastTextRetrainModel(self):
        with self._caplog.at_level(logging.INFO):
            path_to_retrained_model = self.path_to_retrain_fasttext
            create_pickle_file(self.fake_data_path_pickle,
                               predict_container=True)

            parse.main([
                self.a_fasttext_model_type,
                self.fake_data_path_pickle,
                self.pickle_p_export_filename,
                "--device",
                self.cpu_device,
                "--path_to_retrained_model",
                path_to_retrained_model,
            ])

        expected_first_message = (
            f"Parsing dataset file {self.fake_data_path_pickle} using the parser "
            f"FastTextAddressParser")
        actual_first_message = self._caplog.records[0].message
        self.assertEqual(expected_first_message, actual_first_message)

    @skipIf(
        not os.path.exists(
            os.path.join(os.path.expanduser("~"), ".cache", "deepparse",
                         "cc.fr.300.bin")),
        "download of model too long for test in runner",
    )
    def test_ifPathToBPEmbRetrainModel_thenUseBPEmbRetrainModel(self):
        with self._caplog.at_level(logging.INFO):
            path_to_retrained_model = self.path_to_retrain_bpemb
            create_pickle_file(self.fake_data_path_pickle,
                               predict_container=True)

            parse.main([
                self.a_fasttext_model_type,
                self.fake_data_path_pickle,
                self.pickle_p_export_filename,
                "--device",
                self.cpu_device,
                "--path_to_retrained_model",
                path_to_retrained_model,
            ])

        expected_first_message = (
            f"Parsing dataset file {self.fake_data_path_pickle} using the parser "
            f"BPEmbAddressParser")

        # Not the same position as with fasttext due to BPEmb messages
        actual_first_message = self._caplog.records[2].message
        self.assertEqual(expected_first_message, actual_first_message)
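The tests above drive deepparse's parse entry point directly through parse.main, and the same call works outside the test harness. A minimal sketch (the import path and the pickled dataset name are assumptions):

from deepparse.cli import parse  # import path assumed; the tests simply import `parse`

# Parse a pickled list of addresses on CPU and export the result next to the dataset.
parse.main([
    "fasttext",        # parsing_model
    "addresses.p",     # dataset_path
    "parsed.p",        # export_filename
    "--device", "cpu",
])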
Example #45
0
class TestBandersnatchConf(TestCase):
    """
    Tests for the BandersnatchConf singleton class
    """

    tempdir = None
    cwd = None

    def setUp(self) -> None:
        self.cwd = os.getcwd()
        self.tempdir = TemporaryDirectory()
        os.chdir(self.tempdir.name)
        # Hack to ensure each test gets fresh instance if needed
        # We have a dedicated test to ensure we're creating a singleton
        Singleton._instances = {}

    def tearDown(self) -> None:
        if self.tempdir:
            assert self.cwd
            os.chdir(self.cwd)
            self.tempdir.cleanup()
            self.tempdir = None

    def test_is_singleton(self) -> None:
        instance1 = BandersnatchConfig()
        instance2 = BandersnatchConfig()
        self.assertEqual(id(instance1), id(instance2))

    def test_single_config__default__all_sections_present(self) -> None:
        with importlib.resources.path("bandersnatch",
                                      "unittest.conf") as config_file:
            instance = BandersnatchConfig(str(config_file))
            # All default values should at least be present and be the right types
            for section in ["mirror", "plugins", "blocklist"]:
                self.assertIn(section, instance.config.sections())

    def test_single_config__default__mirror__setting_attributes(self) -> None:
        instance = BandersnatchConfig()
        options = [option for option in instance.config["mirror"]]
        options.sort()
        self.assertListEqual(
            options,
            [
                "cleanup",
                "compare-method",
                "directory",
                "global-timeout",
                "hash-index",
                "json",
                "master",
                "release-files",
                "stop-on-error",
                "storage-backend",
                "timeout",
                "verifiers",
                "workers",
            ],
        )

    def test_single_config__default__mirror__setting__types(self) -> None:
        """
        Make sure all default mirror settings will cast to the correct types
        """
        instance = BandersnatchConfig()
        for option, option_type in [
            ("directory", str),
            ("hash-index", bool),
            ("json", bool),
            ("master", str),
            ("stop-on-error", bool),
            ("storage-backend", str),
            ("timeout", int),
            ("global-timeout", int),
            ("workers", int),
            ("compare-method", str),
        ]:
            self.assertIsInstance(
                option_type(instance.config["mirror"].get(option)),
                option_type)

    def test_single_config_custom_setting_boolean(self) -> None:
        with open("test.conf", "w") as testconfig_handle:
            testconfig_handle.write("[mirror]\nhash-index=false\n")
        instance = BandersnatchConfig()
        instance.config_file = "test.conf"
        instance.load_configuration()
        self.assertFalse(instance.config["mirror"].getboolean("hash-index"))

    def test_single_config_custom_setting_int(self) -> None:
        with open("test.conf", "w") as testconfig_handle:
            testconfig_handle.write("[mirror]\ntimeout=999\n")
        instance = BandersnatchConfig()
        instance.config_file = "test.conf"
        instance.load_configuration()
        self.assertEqual(int(instance.config["mirror"]["timeout"]), 999)

    def test_single_config_custom_setting_str(self) -> None:
        with open("test.conf", "w") as testconfig_handle:
            testconfig_handle.write("[mirror]\nmaster=https://foo.bar.baz\n")
        instance = BandersnatchConfig()
        instance.config_file = "test.conf"
        instance.load_configuration()
        self.assertEqual(instance.config["mirror"]["master"],
                         "https://foo.bar.baz")

    def test_multiple_instances_custom_setting_str(self) -> None:
        with open("test.conf", "w") as testconfig_handle:
            testconfig_handle.write("[mirror]\nmaster=https://foo.bar.baz\n")
        instance1 = BandersnatchConfig()
        instance1.config_file = "test.conf"
        instance1.load_configuration()

        instance2 = BandersnatchConfig()
        self.assertEqual(instance2.config["mirror"]["master"],
                         "https://foo.bar.baz")

    def test_validate_config_values(self) -> None:
        default_values = SetConfigValues(
            False,
            "",
            "",
            False,
            "sha256",
            "filesystem",
            False,
            True,
            "hash",
            "",
            False,
        )
        no_options_configparser = configparser.ConfigParser()
        no_options_configparser["mirror"] = {}
        self.assertEqual(default_values,
                         validate_config_values(no_options_configparser))

    def test_validate_config_values_release_files_false_sets_root_uri(
            self) -> None:
        default_values = SetConfigValues(
            False,
            "https://files.pythonhosted.org",
            "",
            False,
            "sha256",
            "filesystem",
            False,
            False,
            "hash",
            "",
            False,
        )
        release_files_false_configparser = configparser.ConfigParser()
        release_files_false_configparser["mirror"] = {"release-files": "false"}
        self.assertEqual(
            default_values,
            validate_config_values(release_files_false_configparser))

    def test_validate_config_values_download_mirror_false_sets_no_fallback(
        self, ) -> None:
        default_values = SetConfigValues(
            False,
            "",
            "",
            False,
            "sha256",
            "filesystem",
            False,
            True,
            "hash",
            "",
            False,
        )
        release_files_false_configparser = configparser.ConfigParser()
        release_files_false_configparser["mirror"] = {
            "download-mirror-no-fallback": "true",
        }
        self.assertEqual(
            default_values,
            validate_config_values(release_files_false_configparser))
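The `Singleton._instances = {}` reset in setUp above assumes the common metaclass-based singleton pattern, where one instance per class is cached in a class-level dict. A generic sketch of that pattern (not bandersnatch's exact implementation):

class Singleton(type):
    """Metaclass that caches a single instance per class."""
    _instances: dict = {}

    def __call__(cls, *args, **kwargs):
        if cls not in cls._instances:
            cls._instances[cls] = super().__call__(*args, **kwargs)
        return cls._instances[cls]


class ConfigLike(metaclass=Singleton):
    """Stand-in for BandersnatchConfig; repeated construction yields the same object."""
    def __init__(self, config_file: str = "bandersnatch.conf") -> None:
        self.config_file = config_file


assert ConfigLike() is ConfigLike()
Singleton._instances = {}   # clearing the cache (as the tests do) forces a fresh instance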
class OptimizerCheckpointTest(TestCase):
    batch_size = 20
    epochs = 10

    def setUp(self):
        torch.manual_seed(42)
        self.pytorch_module = nn.Linear(1, 1)
        self.loss_function = nn.MSELoss()
        self.optimizer = torch.optim.Adam(self.pytorch_module.parameters(),
                                          lr=1e-3)
        self.model = Model(self.pytorch_module, self.optimizer,
                           self.loss_function)
        self.temp_dir_obj = TemporaryDirectory()
        self.checkpoint_filename = os.path.join(self.temp_dir_obj.name,
                                                'my_checkpoint_{epoch}.optim')

    def tearDown(self):
        self.temp_dir_obj.cleanup()

    def test_integration(self):
        train_gen = some_data_generator(OptimizerCheckpointTest.batch_size)
        valid_gen = some_data_generator(OptimizerCheckpointTest.batch_size)
        checkpointer = OptimizerCheckpoint(self.checkpoint_filename, period=1)
        self.model.fit_generator(train_gen,
                                 valid_gen,
                                 epochs=OptimizerCheckpointTest.epochs,
                                 steps_per_epoch=5,
                                 callbacks=[checkpointer])

    def test_checkpoints(self):
        checkpointer = OptimizerCheckpoint(self.checkpoint_filename, period=1)
        self._test_checkpointer(checkpointer)

    def _test_checkpointer(self, checkpointer):
        optimizer_states = {}
        generator = some_data_generator(OptimizerCheckpointTest.batch_size)

        checkpointer.set_params({
            'epochs': OptimizerCheckpointTest.epochs,
            'steps': 1
        })
        checkpointer.set_model(self.model)
        checkpointer.on_train_begin({})
        for epoch in range(1, OptimizerCheckpointTest.epochs + 1):
            checkpointer.on_epoch_begin(epoch, {})
            checkpointer.on_batch_begin(1, {})
            loss = self._update_model(generator)
            checkpointer.on_batch_end(
                1, {
                    'batch': 1,
                    'size': OptimizerCheckpointTest.batch_size,
                    'loss': loss
                })
            checkpointer.on_epoch_end(epoch, {
                'epoch': epoch,
                'loss': loss,
                'val_loss': 1
            })
            filename = self.checkpoint_filename.format(epoch=epoch)
            self.assertTrue(os.path.isfile(filename))
            optimizer_states[epoch] = torch_to_numpy(
                self.optimizer.state_dict(), copy=True)
        checkpointer.on_train_end({})

        self._test_checkpoint(optimizer_states)

    def _update_model(self, generator):
        self.pytorch_module.zero_grad()

        x, y = next(generator)
        pred_y = self.pytorch_module(x)
        loss = self.loss_function(pred_y, y)
        loss.backward()

        self.optimizer.step()

        return float(loss)

    def _test_checkpoint(self, optimizer_states):
        for epoch, epoch_optimizer_state in optimizer_states.items():
            filename = self.checkpoint_filename.format(epoch=epoch)
            self.model.load_optimizer_state(filename)
            saved_optimizer_state = torch_to_numpy(self.optimizer.state_dict())

            self.assertEqual(epoch_optimizer_state, saved_optimizer_state)
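Outside the test harness, the OptimizerCheckpoint callback exercised above plugs directly into a fit call and writes one optimizer state file per epoch, which can later be restored with load_optimizer_state. A minimal sketch; the Poutyne-style import path and the toy data generator are assumptions:

import torch
import torch.nn as nn
from poutyne.framework import Model, OptimizerCheckpoint  # import path assumed


def data_generator(batch_size=20):
    # Toy regression data: y = 2x + 1 with a little noise.
    while True:
        x = torch.rand(batch_size, 1)
        yield x, 2 * x + 1 + 0.01 * torch.randn(batch_size, 1)


network = nn.Linear(1, 1)
optimizer = torch.optim.Adam(network.parameters(), lr=1e-3)
model = Model(network, optimizer, nn.MSELoss())

# Save the optimizer state after every epoch; {epoch} is filled in by the callback.
checkpointer = OptimizerCheckpoint('optim_{epoch}.optim', period=1)
model.fit_generator(data_generator(), data_generator(),
                    epochs=5, steps_per_epoch=5, callbacks=[checkpointer])

# Restore the optimizer state saved at a chosen epoch.
model.load_optimizer_state('optim_3.optim')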
Example #47
0
class MyTest(TestCase):
    def setUp(self):
        self.test_dir = TemporaryDirectory()
    def tearDown(self):
        self.test_dir.cleanup()
Example #48
0
class Snapshot(CoreSysAttributes):
    """A signle hassio snapshot."""

    def __init__(self, coresys, tar_file):
        """Initialize a snapshot."""
        self.coresys = coresys
        self._tarfile = tar_file
        self._data = {}
        self._tmp = None
        self._key = None
        self._aes = None

    @property
    def slug(self):
        """Return snapshot slug."""
        return self._data.get(ATTR_SLUG)

    @property
    def sys_type(self):
        """Return snapshot type."""
        return self._data.get(ATTR_TYPE)

    @property
    def name(self):
        """Return snapshot name."""
        return self._data[ATTR_NAME]

    @property
    def date(self):
        """Return snapshot date."""
        return self._data[ATTR_DATE]

    @property
    def protected(self):
        """Return snapshot date."""
        return self._data.get(ATTR_PROTECTED) is not None

    @property
    def addons(self):
        """Return snapshot date."""
        return self._data[ATTR_ADDONS]

    @property
    def addon_list(self):
        """Return a list of addons slugs."""
        return [addon_data[ATTR_SLUG] for addon_data in self.addons]

    @property
    def folders(self):
        """Return list of saved folders."""
        return self._data[ATTR_FOLDERS]

    @property
    def repositories(self):
        """Return snapshot date."""
        return self._data[ATTR_REPOSITORIES]

    @repositories.setter
    def repositories(self, value):
        """Set snapshot date."""
        self._data[ATTR_REPOSITORIES] = value

    @property
    def homeassistant_version(self):
        """Return snapshot homeassistant version."""
        return self._data[ATTR_HOMEASSISTANT].get(ATTR_VERSION)

    @property
    def homeassistant(self):
        """Return snapshot homeassistant data."""
        return self._data[ATTR_HOMEASSISTANT]

    @property
    def size(self):
        """Return snapshot size."""
        if not self.tarfile.is_file():
            return 0
        return round(self.tarfile.stat().st_size / 1048576, 2)  # calc mbyte

    @property
    def is_new(self):
        """Return True if there is new."""
        return not self.tarfile.exists()

    @property
    def tarfile(self):
        """Return path to Snapshot tarfile."""
        return self._tarfile

    def new(self, slug, name, date, sys_type, password=None):
        """Initialize a new snapshot."""
        # init metadata
        self._data[ATTR_SLUG] = slug
        self._data[ATTR_NAME] = name
        self._data[ATTR_DATE] = date
        self._data[ATTR_TYPE] = sys_type

        # Add defaults
        self._data = SCHEMA_SNAPSHOT(self._data)

        # Set password
        if password:
            self._key = password_to_key(password)
            self._aes = AES.new(
                self._key, AES.MODE_CBC, iv=key_to_iv(self._key))
            self._data[ATTR_PROTECTED] = password_for_validating(password)
            self._data[ATTR_CRYPTO] = CRYPTO_AES128

    def set_password(self, password):
        """Set the password for a exists snapshot."""
        if not password:
            return False

        validating = password_for_validating(password)
        if validating != self._data[ATTR_PROTECTED]:
            return False

        self._key = password_to_key(password)
        self._aes = AES.new(self._key, AES.MODE_CBC, iv=key_to_iv(self._key))
        return True

    def _encrypt_data(self, data):
        """Make data secure."""
        if not self._key or data is None:
            return data

        return b64encode(
            self._aes.encrypt(Padding.pad(data.encode(), 16))).decode()

    def _decrypt_data(self, data):
        """Make data readable."""
        if not self._key or data is None:
            return data

        return Padding.unpad(
            self._aes.decrypt(b64decode(data)), 16).decode()

    async def load(self):
        """Read snapshot.json from tar file."""
        if not self.tarfile.is_file():
            _LOGGER.error("No tarfile %s", self.tarfile)
            return False

        def _load_file():
            """Read snapshot.json."""
            with tarfile.open(self.tarfile, "r:") as snapshot:
                json_file = snapshot.extractfile("./snapshot.json")
                return json_file.read()

        # read snapshot.json
        try:
            raw = await self._loop.run_in_executor(None, _load_file)
        except (tarfile.TarError, KeyError) as err:
            _LOGGER.error(
                "Can't read snapshot tarfile %s: %s", self.tarfile, err)
            return False

        # parse data
        try:
            raw_dict = json.loads(raw)
        except json.JSONDecodeError as err:
            _LOGGER.error("Can't read data for %s: %s", self.tarfile, err)
            return False

        # validate
        try:
            self._data = SCHEMA_SNAPSHOT(raw_dict)
        except vol.Invalid as err:
            _LOGGER.error("Can't validate data for %s: %s", self.tarfile,
                          humanize_error(raw_dict, err))
            return False

        return True

    async def __aenter__(self):
        """Async context to open a snapshot."""
        self._tmp = TemporaryDirectory(dir=str(self._config.path_tmp))

        # create a snapshot
        if not self.tarfile.is_file():
            return self

        # extract an existing snapshot
        def _extract_snapshot():
            """Extract a snapshot."""
            with tarfile.open(self.tarfile, "r:") as tar:
                tar.extractall(path=self._tmp.name)

        await self._loop.run_in_executor(None, _extract_snapshot)

    async def __aexit__(self, exception_type, exception_value, traceback):
        """Async context to close a snapshot."""
        # existing snapshot or exception during build
        if self.tarfile.is_file() or exception_type is not None:
            self._tmp.cleanup()
            return

        # validate data
        try:
            self._data = SCHEMA_SNAPSHOT(self._data)
        except vol.Invalid as err:
            _LOGGER.error("Invalid data for %s: %s", self.tarfile,
                          humanize_error(self._data, err))
            raise ValueError("Invalid config") from None

        # new snapshot, build it
        def _create_snapshot():
            """Create a new snapshot."""
            with tarfile.open(self.tarfile, "w:") as tar:
                tar.add(self._tmp.name, arcname=".")

        try:
            write_json_file(Path(self._tmp.name, "snapshot.json"), self._data)
            await self._loop.run_in_executor(None, _create_snapshot)
        except (OSError, json.JSONDecodeError) as err:
            _LOGGER.error("Can't write snapshot: %s", err)
        finally:
            self._tmp.cleanup()

    async def store_addons(self, addon_list=None):
        """Add a list of add-ons into snapshot."""
        addon_list = addon_list or self._addons.list_installed

        async def _addon_save(addon):
            """Task to store a add-on into snapshot."""
            addon_file = SecureTarFile(
                Path(self._tmp.name, f"{addon.slug}.tar.gz"),
                'w', key=self._key)

            # Take snapshot
            if not await addon.snapshot(addon_file):
                _LOGGER.error("Can't make snapshot from %s", addon.slug)
                return

            # Store to config
            self._data[ATTR_ADDONS].append({
                ATTR_SLUG: addon.slug,
                ATTR_NAME: addon.name,
                ATTR_VERSION: addon.version_installed,
                ATTR_SIZE: addon_file.size,
            })

        # Run tasks
        tasks = [_addon_save(addon) for addon in addon_list]
        if tasks:
            await asyncio.wait(tasks, loop=self._loop)

    async def restore_addons(self, addon_list=None):
        """Restore a list add-on from snapshot."""
        if not addon_list:
            addon_list = []
            for addon_slug in self.addon_list:
                addon = self._addons.get(addon_slug)
                if addon:
                    addon_list.append(addon)

        async def _addon_restore(addon):
            """Task to restore a add-on into snapshot."""
            addon_file = SecureTarFile(
                Path(self._tmp.name, f"{addon.slug}.tar.gz"),
                'r', key=self._key)

            # Check that it exists inside the snapshot
            if not addon_file.path.exists():
                _LOGGER.error("Can't find snapshot for %s", addon.slug)
                return

            # Perform the restore
            if not await addon.restore(addon_file):
                _LOGGER.error("Can't restore snapshot for %s", addon.slug)
                return

        # Run tasks
        tasks = [_addon_restore(addon) for addon in addon_list]
        if tasks:
            await asyncio.wait(tasks, loop=self._loop)

    async def store_folders(self, folder_list=None):
        """Backup hassio data into snapshot."""
        folder_list = set(folder_list or ALL_FOLDERS)

        def _folder_save(name):
            """Intenal function to snapshot a folder."""
            slug_name = name.replace("/", "_")
            tar_name = Path(self._tmp.name, f"{slug_name}.tar.gz")
            origin_dir = Path(self._config.path_hassio, name)

            # Check that it exists
            if not origin_dir.is_dir():
                _LOGGER.warning("Can't find snapshot folder %s", name)
                return

            # Take snapshot
            try:
                _LOGGER.info("Snapshot folder %s", name)
                with SecureTarFile(tar_name, 'w', key=self._key) as tar_file:
                    tar_file.add(origin_dir, arcname=".")

                _LOGGER.info("Snapshot folder %s done", name)
                self._data[ATTR_FOLDERS].append(name)
            except (tarfile.TarError, OSError) as err:
                _LOGGER.warning("Can't snapshot folder %s: %s", name, err)

        # Run tasks
        tasks = [self._loop.run_in_executor(None, _folder_save, folder)
                 for folder in folder_list]
        if tasks:
            await asyncio.wait(tasks, loop=self._loop)

    async def restore_folders(self, folder_list=None):
        """Backup hassio data into snapshot."""
        folder_list = set(folder_list or self.folders)

        def _folder_restore(name):
            """Intenal function to restore a folder."""
            slug_name = name.replace("/", "_")
            tar_name = Path(self._tmp.name, f"{slug_name}.tar.gz")
            origin_dir = Path(self._config.path_hassio, name)

            # Check if exists inside snapshot
            if not tar_name.exists():
                _LOGGER.warning("Can't find restore folder %s", name)
                return

            # Clean old stuff
            if origin_dir.is_dir():
                remove_folder(origin_dir)

            # Perform the restore
            try:
                _LOGGER.info("Restore folder %s", name)
                with SecureTarFile(tar_name, 'r', key=self._key) as tar_file:
                    tar_file.extractall(path=origin_dir)
                _LOGGER.info("Restore folder %s done", name)
            except (tarfile.TarError, OSError) as err:
                _LOGGER.warning("Can't restore folder %s: %s", name, err)

        # Run tasks
        tasks = [self._loop.run_in_executor(None, _folder_restore, folder)
                 for folder in folder_list]
        if tasks:
            await asyncio.wait(tasks, loop=self._loop)

    def store_homeassistant(self):
        """Read all data from homeassistant object."""
        self.homeassistant[ATTR_VERSION] = self._homeassistant.version
        self.homeassistant[ATTR_WATCHDOG] = self._homeassistant.watchdog
        self.homeassistant[ATTR_BOOT] = self._homeassistant.boot
        self.homeassistant[ATTR_WAIT_BOOT] = self._homeassistant.wait_boot

        # Custom image
        if self._homeassistant.is_custom_image:
            self.homeassistant[ATTR_IMAGE] = self._homeassistant.image
            self.homeassistant[ATTR_LAST_VERSION] = \
                self._homeassistant.last_version

        # API/Proxy
        self.homeassistant[ATTR_PORT] = self._homeassistant.api_port
        self.homeassistant[ATTR_SSL] = self._homeassistant.api_ssl
        self.homeassistant[ATTR_PASSWORD] = \
            self._encrypt_data(self._homeassistant.api_password)

    def restore_homeassistant(self):
        """Write all data to homeassistant object."""
        self._homeassistant.watchdog = self.homeassistant[ATTR_WATCHDOG]
        self._homeassistant.boot = self.homeassistant[ATTR_BOOT]
        self._homeassistant.wait_boot = self.homeassistant[ATTR_WAIT_BOOT]

        # Custom image
        if self.homeassistant.get(ATTR_IMAGE):
            self._homeassistant.image = self.homeassistant[ATTR_IMAGE]
            self._homeassistant.last_version = \
                self.homeassistant[ATTR_LAST_VERSION]

        # API/Proxy
        self._homeassistant.api_port = self.homeassistant[ATTR_PORT]
        self._homeassistant.api_ssl = self.homeassistant[ATTR_SSL]
        self._homeassistant.api_password = \
            self._decrypt_data(self.homeassistant[ATTR_PASSWORD])

        # save
        self._homeassistant.save_data()

    def store_repositories(self):
        """Store repository list into snapshot."""
        self.repositories = self._config.addons_repositories

    def restore_repositories(self):
        """Restore repositories from snapshot.

        Return a coroutine.
        """
        return self._addons.load_repositories(self.repositories)
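
A self-contained sketch of the scheme _encrypt_data / _decrypt_data above implement: AES-128 in CBC mode with PKCS#7 padding, base64-encoded. The SHA-256-based key/IV derivation is only an illustrative stand-in for the password_to_key / key_to_iv helpers, whose real definitions are not shown here.

import hashlib
from base64 import b64decode, b64encode

from Crypto.Cipher import AES
from Crypto.Util import Padding


def derive_key(password):
    # Stand-in for password_to_key(): 16 bytes for AES-128.
    return hashlib.sha256(password.encode()).digest()[:16]


def derive_iv(key):
    # Stand-in for key_to_iv(): a 16-byte IV derived from the key.
    return hashlib.sha256(key).digest()[:16]


def encrypt_data(data, password):
    key = derive_key(password)
    aes = AES.new(key, AES.MODE_CBC, iv=derive_iv(key))
    return b64encode(aes.encrypt(Padding.pad(data.encode(), 16))).decode()


def decrypt_data(token, password):
    key = derive_key(password)
    aes = AES.new(key, AES.MODE_CBC, iv=derive_iv(key))
    return Padding.unpad(aes.decrypt(b64decode(token)), 16).decode()


assert decrypt_data(encrypt_data("api password", "hunter2"), "hunter2") == "api password"
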
class TestWhitelistProject(TestCase):
    """
    Tests for the bandersnatch filtering classes
    """

    tempdir = None
    cwd = None

    def setUp(self) -> None:
        self.cwd = os.getcwd()
        self.tempdir = TemporaryDirectory()
        bandersnatch.storage.loaded_storage_plugins = defaultdict(list)
        os.chdir(self.tempdir.name)

    def tearDown(self) -> None:
        if self.tempdir:
            assert self.cwd
            os.chdir(self.cwd)
            self.tempdir.cleanup()
            self.tempdir = None

    def test__plugin__loads__explicitly_enabled(self) -> None:
        mock_config(contents="""\
[plugins]
enabled =
    whitelist_project
""")

        plugins = bandersnatch.filter.LoadedFilters().filter_project_plugins()
        names = [plugin.name for plugin in plugins]
        self.assertListEqual(names, ["whitelist_project"])
        self.assertEqual(len(plugins), 1)

    def test__plugin__loads__default(self) -> None:
        mock_config("""\
[mirror]
storage-backend = filesystem

[plugins]
""")

        plugins = bandersnatch.filter.LoadedFilters().filter_project_plugins()
        names = [plugin.name for plugin in plugins]
        self.assertNotIn("whitelist_project", names)

    def test__filter__matches__package(self) -> None:
        mock_config("""\
[mirror]
storage-backend = filesystem

[plugins]
enabled =
    whitelist_project

[whitelist]
packages =
    foo
""")

        mirror = Mirror(Path("."), Master(url="https://foo.bar.com"))
        mirror.packages_to_sync = {"foo": ""}
        mirror._filter_packages()

        self.assertIn("foo", mirror.packages_to_sync.keys())

    def test__filter__nomatch_package(self) -> None:
        mock_config("""\
[mirror]
storage-backend = filesystem

[plugins]
enabled =
    whitelist_project

[whitelist]
packages =
    foo
""")

        mirror = Mirror(Path("."), Master(url="https://foo.bar.com"))
        mirror.packages_to_sync = {"foo": "", "foo2": ""}
        mirror._filter_packages()

        self.assertIn("foo", mirror.packages_to_sync.keys())
        self.assertNotIn("foo2", mirror.packages_to_sync.keys())
Example #50
class TestBandersnatchFilter(TestCase):
    """
    Tests for the bandersnatch filtering classes
    """

    tempdir = None
    cwd = None

    def setUp(self) -> None:
        self.cwd = os.getcwd()
        self.tempdir = TemporaryDirectory()
        os.chdir(self.tempdir.name)
        sys.stderr.write(self.tempdir.name)
        sys.stderr.flush()

    def tearDown(self) -> None:
        if self.tempdir:
            assert self.cwd
            os.chdir(self.cwd)
            self.tempdir.cleanup()
            self.tempdir = None

    def test__filter_project_plugins__loads(self) -> None:
        mock_config(
            """\
[plugins]
enabled = all
"""
        )
        builtin_plugin_names = [
            "blocklist_project",
            "regex_project",
            "allowlist_project",
        ]

        plugins = LoadedFilters().filter_project_plugins()
        names = [plugin.name for plugin in plugins]
        for name in builtin_plugin_names:
            self.assertIn(name, names)

    def test__filter_release_plugins__loads(self) -> None:
        mock_config(
            """\
[plugins]
enabled = all
"""
        )
        builtin_plugin_names = [
            "blocklist_release",
            "prerelease_release",
            "regex_release",
            "latest_release",
        ]

        plugins = LoadedFilters().filter_release_plugins()
        names = [plugin.name for plugin in plugins]
        for name in builtin_plugin_names:
            self.assertIn(name, names)

    def test__filter_no_plugin(self) -> None:
        mock_config(
            """\
[plugins]
enabled =
"""
        )

        plugins = LoadedFilters().filter_release_plugins()
        self.assertEqual(len(plugins), 0)

        plugins = LoadedFilters().filter_project_plugins()
        self.assertEqual(len(plugins), 0)

    def test__filter_base_clases(self) -> None:
        """
        Test the base filter classes
        """

        plugin = Filter()
        self.assertEqual(plugin.name, "filter")
        try:
            plugin.initialize_plugin()
            error = False
        except Exception:
            error = True
        self.assertFalse(error)

        plugin = FilterReleasePlugin()
        self.assertIsInstance(plugin, Filter)
        self.assertEqual(plugin.name, "release_plugin")
        try:
            plugin.filter({})
            error = False
        except Exception:
            error = True
        self.assertFalse(error)

        plugin = FilterProjectPlugin()
        self.assertIsInstance(plugin, Filter)
        self.assertEqual(plugin.name, "project_plugin")
        try:
            result = plugin.check_match(key="value")
            error = False
            self.assertIsInstance(result, bool)
        except Exception:
            error = True
        self.assertFalse(error)

    def test_deprecated_keys(self) -> None:
        with open("test.conf", "w") as f:
            f.write("[allowlist]\npackages=foo\n[blocklist]\npackages=bar\n")
        instance = BandersnatchConfig()
        instance.config_file = "test.conf"
        instance.load_configuration()
        plugin = Filter()
        assert plugin.allowlist.name == "allowlist"
        assert plugin.blocklist.name == "blocklist"

    def test__filter_project_blocklist_allowlist__pep503_normalize(self) -> None:
        mock_config(
            """\
[plugins]
enabled =
    blocklist_project
    allowlist_project

[blocklist]
packages =
    SampleProject
    trove----classifiers

[allowlist]
packages =
    SampleProject
    trove----classifiers
"""
        )

        plugins = {
            plugin.name: plugin for plugin in LoadedFilters().filter_project_plugins()
        }

        self.assertTrue(plugins["blocklist_project"].check_match(name="sampleproject"))
        self.assertTrue(
            plugins["blocklist_project"].check_match(name="trove-classifiers")
        )
        self.assertFalse(plugins["allowlist_project"].check_match(name="sampleproject"))
        self.assertFalse(
            plugins["allowlist_project"].check_match(name="trove-classifiers")
        )
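
The normalization the last test relies on is the PEP 503 rule itself: runs of "-", "_" and "." collapse to a single "-" and names compare case-insensitively. A small standalone sketch, independent of bandersnatch's own helper:

import re


def pep503_normalize(name):
    return re.sub(r"[-_.]+", "-", name).lower()


assert pep503_normalize("SampleProject") == "sampleproject"
assert pep503_normalize("trove----classifiers") == "trove-classifiers"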
Example #51
class CommitDropTest(TestCase):
    def test_drop_single_commit_by_hash(self):
        hash1 = self.create_commit('commit 1')
        self.create_commit('commit 2')

        self.drop(hash1)

        self.assertEqual(['commit 2'], self.git.log())

    def test_drop_commits_by_desc(self):
        self.create_commit('commit 1 is good')
        self.create_commit('commit 2 is bad')
        self.create_commit('commit 3 is good')
        self.create_commit('commit 4 is bad')

        self.drop('.*bad')

        expected = ['commit 1 is good', 'commit 3 is good']
        self.assertEqual(expected, self.git.log())

    def test_drop_sequential_commits_in_single_plugin_instance(self):
        self.create_commit('commit 1')
        hash2 = self.create_commit('commit 2')
        hash3 = self.create_commit('commit 3')
        hash4 = self.create_commit('commit 4')
        self.create_commit('commit 5')

        self.drop(','.join((hash2, hash3, hash4)))

        expected = ['commit 1', 'commit 5']
        self.assertEqual(expected, self.git.log())

    def test_drop_sequential_commits_in_multiple_plugin_instances(self):
        self.create_commit('commit 1')
        hash2 = self.create_commit('commit 2')
        hash3 = self.create_commit('commit 3')
        hash4 = self.create_commit('commit 4')
        self.create_commit('commit 5')

        self.drop(hash2, hash3, hash4)

        expected = ['commit 1', 'commit 5']
        self.assertEqual(expected, self.git.log())

    def test_drop_nonsequential_commits(self):
        self.create_commit('commit 1')
        hash2 = self.create_commit('commit 2')
        self.create_commit('commit 3')
        hash4 = self.create_commit('commit 4')

        self.drop(','.join((hash2, hash4)))

        expected = ['commit 1', 'commit 3']
        self.assertEqual(expected, self.git.log())

    def test_drop_head(self):
        self.create_commit('first')
        self.create_commit('middle')
        hash_last = self.create_commit('last')

        self.drop(hash_last)

        self.assertEqual(['first', 'middle'], self.git.log())

    def test_drop_merge_commit(self):
        initial_hash = self.create_commit('initial')
        self.create_commit('branch A')
        self.hg.checkout(initial_hash)
        self.create_commit('branch B')
        self.hg.merge()
        merge_hash = self.create_commit('merge to drop')
        self.create_commit('last')

        self.drop(merge_hash)

        expected_commits = ['initial', 'branch A', 'branch B', 'last']
        self.assertEqual(expected_commits, self.git.log())
        self.assertEqual(['branch B', 'branch A'], self.git_parents('last'))

    def test_drop_different_commits_in_multiple_plugin_instances(self):
        self.create_commit('good commit')
        bad_hash = self.create_commit('bad commit')
        self.create_commit('awful commit')
        self.create_commit('another good commit')

        self.drop('^awful.*', bad_hash)

        expected = ['good commit', 'another good commit']
        self.assertEqual(expected, self.git.log())

    def test_drop_same_commit_in_multiple_plugin_instances(self):
        self.create_commit('good commit')
        bad_hash = self.create_commit('bad commit')
        self.create_commit('another good commit')

        self.drop('^bad.*', bad_hash)

        expected = ['good commit', 'another good commit']
        self.assertEqual(expected, self.git.log())

    def setUp(self):
        self.tempdir = TemporaryDirectory()

        self.hg = HgDriver(Path(self.tempdir.name) / 'hgrepo')
        self.hg.init()

        self.git = GitDriver(Path(self.tempdir.name) / 'gitrepo')
        self.git.init()

        self.export = ExportDriver(self.hg.repodir, self.git.repodir)

    def tearDown(self):
        self.tempdir.cleanup()

    def create_commit(self, message):
        self.write_file_data('Data for %r.' % message)
        return self.hg.commit(message)

    def write_file_data(self, data, filename='test_file.txt'):
        path = self.hg.repodir / filename
        with path.open('w') as f:
            print(data, file=f)

    def drop(self, *spec):
        self.export.run_with_drop(*spec)

    def git_parents(self, message):
        matches = self.git.grep_log(message)
        if len(matches) != 1:
            raise Exception('No unique commit with message %r.' % message)
        subject, parents = self.git.details(matches[0])
        return [self.git.details(p)[0] for p in parents]
Example #52
class TestAllowlistRelease(TestCase):
    """
    Tests for the bandersnatch filtering classes
    """
    def setUp(self) -> None:
        self.cwd = os.getcwd()
        self.tempdir = TemporaryDirectory()
        os.chdir(self.tempdir.name)

    def tearDown(self) -> None:
        if self.tempdir:
            assert self.cwd
            os.chdir(self.cwd)
            self.tempdir.cleanup()

    def test__plugin__loads__explicitly_enabled(self) -> None:
        mock_config("""\
[plugins]
enabled =
    allowlist_release
""")

        plugins = bandersnatch.filter.LoadedFilters().filter_release_plugins()
        names = [plugin.name for plugin in plugins]
        self.assertListEqual(names, ["allowlist_release"])
        self.assertEqual(len(plugins), 1)

    def test__plugin__doesnt_load__explicitly__disabled(self) -> None:
        mock_config("""\
[mirror]
storage-backend = filesystem
workers = 2

[plugins]
enabled =
    allowlist_package
""")

        plugins = bandersnatch.filter.LoadedFilters().filter_release_plugins()
        names = [plugin.name for plugin in plugins]
        self.assertNotIn("allowlist_release", names)

    def test__filter__matches__release(self) -> None:
        mock_config("""\
[mirror]
storage-backend = filesystem
workers = 2

[plugins]
enabled =
    allowlist_release
[allowlist]
packages =
    foo==1.2.0
""")

        mirror = BandersnatchMirror(Path("."),
                                    Master(url="https://foo.bar.com"))
        pkg = Package("foo", 1)
        pkg._metadata = {
            "info": {
                "name": "foo"
            },
            "releases": {
                "1.2.0": {},
                "1.2.1": {}
            },
        }

        pkg.filter_all_releases(mirror.filters.filter_release_plugins())

        self.assertEqual(pkg.releases, {"1.2.0": {}})

    def test__filter__matches__release__commented__inline(self) -> None:
        mock_config("""\
[mirror]
storage-backend = filesystem
workers = 2

[plugins]
enabled =
    allowlist_release
[allowlist]
packages =
    foo==1.2.0      # some inline comment
""")

        mirror = BandersnatchMirror(Path("."),
                                    Master(url="https://foo.bar.com"))
        pkg = Package("foo", 1)
        pkg._metadata = {
            "info": {
                "name": "foo"
            },
            "releases": {
                "1.2.0": {},
                "1.2.1": {}
            },
        }

        pkg.filter_all_releases(mirror.filters.filter_release_plugins())

        self.assertEqual(pkg.releases, {"1.2.0": {}})

    def test__dont__filter__prereleases(self) -> None:
        mock_config("""\
[mirror]
storage-backend = filesystem
workers = 2

[plugins]
enabled =
    allowlist_release
[allowlist]
packages =
    foo<=1.2.0
""")

        mirror = BandersnatchMirror(Path("."),
                                    Master(url="https://foo.bar.com"))
        pkg = Package("foo", 1)
        pkg._metadata = {
            "info": {
                "name": "foo"
            },
            "releases": {
                "1.1.0a2": {},
                "1.1.1beta1": {},
                "1.2.0": {},
                "1.2.1": {},
                "1.2.2alpha3": {},
                "1.2.3rc1": {},
            },
        }

        pkg.filter_all_releases(mirror.filters.filter_release_plugins())

        self.assertEqual(pkg.releases, {
            "1.1.0a2": {},
            "1.1.1beta1": {},
            "1.2.0": {}
        })

    def test__casing__no__affect(self) -> None:
        mock_config("""\
[mirror]
storage-backend = filesystem
workers = 2

[plugins]
enabled =
    allowlist_release
[allowlist]
packages =
    Foo<=1.2.0
""")

        mirror = BandersnatchMirror(Path("."),
                                    Master(url="https://foo.bar.com"))
        pkg = Package("foo", 1)
        pkg._metadata = {
            "info": {
                "name": "foo"
            },
            "releases": {
                "1.2.0": {},
                "1.2.1": {}
            },
        }

        pkg.filter_all_releases(mirror.filters.filter_release_plugins())

        self.assertEqual(pkg.releases, {"1.2.0": {}})
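
A sketch of the release filtering these tests describe, using the packaging library directly rather than bandersnatch's plugin machinery; prereleases=True mirrors test__dont__filter__prereleases, where pre-releases that satisfy the specifier are kept.

from packaging.specifiers import SpecifierSet
from packaging.version import Version


def filter_releases(releases, specifier):
    spec = SpecifierSet(specifier)
    return {v: data for v, data in releases.items()
            if spec.contains(Version(v), prereleases=True)}


releases = {"1.1.0a2": {}, "1.1.1beta1": {}, "1.2.0": {}, "1.2.1": {}}
assert set(filter_releases(releases, "<=1.2.0")) == {"1.1.0a2", "1.1.1beta1", "1.2.0"}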
Example #53
class LEMSModel(
        sciunit.Model,
        cap.Runnable,
):
    """A generic LEMS model"""
    def __init__(self, LEMS_file_path, name=None, backend=None, attrs=None):

        #for base in cls.__bases__:
        #    sciunit.Model.__init__()
        if name is None:
            name = os.path.split(LEMS_file_path)[1].split('.')[0]
        self.name = name
        #sciunit.Modelsuper(LEMSModel,self).__init__(name=name)
        self.attrs = attrs if attrs else {}
        self.orig_lems_file_path = os.path.abspath(LEMS_file_path)
        assert os.path.isfile(self.orig_lems_file_path),\
            "'%s' is not a file" % self.orig_lems_file_path
        # Use original path unless create_lems_file is called
        self.lems_file_path = self.orig_lems_file_path
        self.run_defaults = pynml.DEFAULTS
        self.run_defaults['nogui'] = True
        self.run_params = {}
        self.last_run_params = None
        self.skip_run = False
        self.rerun = True  # Needs to be rerun since it hasn't been run yet!
        self.unpicklable = []
        if backend is None:
            backend = 'jNeuroML'
        self.set_backend(backend)

    def get_backend(self):
        return self._backend

    def set_backend(self, backend):
        if isinstance(backend, str):
            name = backend
            args = []
            kwargs = {}
        elif isinstance(backend, (tuple, list)):
            name = ''
            args = []
            kwargs = {}
            for i in range(len(backend)):
                if i == 0:
                    name = backend[i]
                else:
                    if isinstance(backend[i], dict):
                        kwargs.update(backend[i])
                    else:
                        args += backend[i]
        else:
            raise TypeError("Backend must be string, tuple, or list")
        if name in available_backends:
            self.backend = name
            self._backend = available_backends[name]()
        elif name is None:
            # The base class should not be called.
            raise Exception(("A backend (e.g. 'jNeuroML' or 'NEURON') "
                             "must be selected"))
        else:
            raise Exception("Backend %s not found in backends.py" \
                            % name)
        self._backend.model = self
        self._backend.init_backend(*args, **kwargs)

    def get_nml_paths(self, lems_tree=None, absolute=True, original=False):
        if not lems_tree:
            lems_tree = etree.parse(self.lems_file_path)
        nml_paths = [x.attrib['file'] for x in \
                     lems_tree.xpath("Include[contains(@file, '.nml')]")]
        if absolute:  # Turn into absolute paths
            lems_file_path = self.orig_lems_file_path if original \
                                                      else self.lems_file_path
            nml_paths = [os.path.join(os.path.dirname(lems_file_path),x) \
                         for x in nml_paths]
        return nml_paths

    def create_lems_file_copy(self, name=None, use=True):
        """Creates a temporary, writable copy of the original LEMS file so that
        e.g. edits can be made to it programatically before simulation
        """
        if name is None:
            name = self.name
        if not hasattr(self, 'temp_dir'):
            self.temp_dir = TemporaryDirectory()
        lems_copy_path = os.path.join(self.temp_dir.name, '%s.xml' % name)
        shutil.copy2(self.orig_lems_file_path, lems_copy_path)
        nml_paths = self.get_nml_paths(original=True)
        for orig_nml_path in nml_paths:
            new_nml_path = os.path.join(self.temp_dir.name,
                                        os.path.basename(orig_nml_path))
            shutil.copy2(orig_nml_path, new_nml_path)
        if self.attrs:
            self.set_lems_attrs(self.attrs, path=lems_copy_path)
        if use:
            self.lems_file_path = lems_copy_path
        return lems_copy_path

    def set_attrs(self, attrs):
        self._backend.set_attrs(**attrs)

    def inject_square_current(self, current):
        self._backend.inject_square_current(current)

    def set_lems_attrs(self, attrs, path=None):
        if path is None:
            path = self.lems_file_path
        paths = [path] + self.get_nml_paths()
        for p in paths:
            tree = etree.parse(p)
            for key1, value1 in attrs.items():
                nodes = tree.findall(key1)
                for node in nodes:
                    for key2, value2 in value1.items():
                        node.attrib[key2] = value2
            tree.write(p)

    def run(self, rerun=None, **run_params):
        if rerun is None:
            rerun = self.rerun
        self.set_run_params(**run_params)
        for key, value in self.run_defaults.items():
            if key not in self.run_params:
                self.set_run_params(**{key: value})
        #if (not rerun) and hasattr(self,'last_run_params') and \
        #   self.run_params == self.last_run_params:
        #    print("Same run_params; skipping...")
        #    return

        self.results = self._backend.local_run()
        self.last_run_params = deepcopy(self.run_params)
        #self.rerun = False
        # Reset run parameters so the next test has to pass its own
        # run parameters and not use the same ones
        self.run_params = {}

    def set_run_params(self, **params):
        self._backend.set_run_params(**params)

    def set_lems_run_params(self, verbose=False):
        from lxml import etree
        from neuroml import nml
        lems_tree = etree.parse(self.lems_file_path)
        trees = {self.lems_file_path: lems_tree}

        # Edit LEMS files.
        nml_paths = self.get_nml_paths(lems_tree=lems_tree)
        trees.update({x: nml.nml.parsexml_(x) for x in nml_paths})

        # Edit NML files.
        for file_path, tree in trees.items():
            for key, value in self.run_params.items():
                if key == 'injected_square_current':
                    pulse_generators = tree.findall('pulseGenerator')
                    for pg in pulse_generators:
                        for attr in ['delay', 'duration', 'amplitude']:
                            if attr in value:
                                if verbose:
                                    print('Setting %s to %f' %
                                          (attr, value[attr]))
                                pg.attrib[attr] = '%s' % value[attr]

            tree.write(file_path)

    def inject_square_current(self, current):
        self._backend.inject_square_current(current)

    @property
    def state(self):
        return self._state(
            keys=['name', 'url', 'attrs', 'run_params', 'backend'])

    def __del__(self):
        if hasattr(self, 'temp_dir'):  # is not type(None):
            self.temp_dir.cleanup()  # Delete the temporary directory
            s = super(LEMSModel, self)
            if hasattr(s, '__del__'):
                s.__del__()
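
The create_lems_file_copy pattern above, reduced to a standard-library sketch: copy the source file into a TemporaryDirectory, edit only the copy, and clean the directory up when the owner is garbage-collected. The class and file names below are illustrative only.

import os
import shutil
import tempfile
from tempfile import TemporaryDirectory


class EditableCopy:
    """Own a writable copy of a file inside a TemporaryDirectory."""

    def __init__(self, original_path):
        self.temp_dir = TemporaryDirectory()
        self.path = os.path.join(self.temp_dir.name,
                                 os.path.basename(original_path))
        shutil.copy2(original_path, self.path)

    def append_line(self, text):
        # Edits touch only the temporary copy, never the original.
        with open(self.path, "a") as fd:
            fd.write(text + "\n")

    def __del__(self):
        if hasattr(self, "temp_dir"):
            self.temp_dir.cleanup()


# Usage: create a throwaway source file, then edit its copy.
with tempfile.NamedTemporaryFile("w", suffix=".xml", delete=False) as src:
    src.write("<Lems/>\n")
copy = EditableCopy(src.name)
copy.append_line("<!-- edited in the temporary copy only -->")
os.unlink(src.name)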
Example #54
class GitRepoMainTestCase(TestGitPopenMockupMixin):
    def setup_method(self, method):
        self.log.info('GitRepoMainTestCase.setup_method({})'.format(method))
        self.tempdir = TemporaryDirectory()
        RepositoryService.service_map = {
            'github': RepositoryMockup,
            'gitlab': RepositoryMockup,
            'bitbucket': RepositoryMockup,
        }
        RepositoryService.command_map = {
            'hub': 'github',
            'lab': 'gitlab',
            'bb': 'bitbucket',
        }
        # setup git command mockup
        self.setup_git_popen()

    def teardown_method(self, method):
        self.log.info('GitRepoMainTestCase.teardown_method({})'.format(method))
        RepositoryService._current = RepositoryMockup(c={})
        self.tempdir.cleanup()

    def setup_args(self, d, args={}):
        cli_args = {
            '--force': False,
            '--help': False,
            '--path': '.',
            '--verbose': 4,
            '--no-clone': False,
            '--tracking': 'master',
            '--alone': False,
            '--add': False,
            '--clone': False,
            '<name>': None,
            '<branch>': None,
            '<target>': self.target,
            '<target_repo>': None,
            '<user>/<repo>': '',
            'add': False,
            'clone': False,
            'create': False,
            'delete': False,
            'fork': False,
            'gist': False,
            'fetch': False,
            'fork': False,
            'list': False,
            'ls': False,
            'open': False,
            '--secret': False,
            '<description>': None,
            '--message': None,
            '<gist>': None,
            '<gist_file>': None,
            '<gist_path>': [],
            'request': False,
            '<request>': None,
            '<local_branch>': None,
            '<remote_branch>': None,
            '<user>/<repo>': None,
        }
        cli_args.update(d)
        cli_args.update(args)
        return cli_args

    def main_add(self, repo, rc=0, args={}):
        os.mkdir(os.path.join(self.tempdir.name, repo.split('/')[-1]))
        Repo.init(os.path.join(self.tempdir.name, repo.split('/')[-1]))
        assert rc == main(self.setup_args({
            'add': True,
            '<user>/<repo>': repo,
            '--path': self.tempdir.name
        }, args)), "Non {} result for add".format(rc)
        return RepositoryService._current._did_add

    def main_clone(self, repo, rc=0, args={}):
        assert rc == main(self.setup_args({
            'clone': True,
            '<user>/<repo>': repo,
            '--path': self.tempdir.name
        }, args)), "Non {} result for clone".format(rc)
        return RepositoryService._current._did_clone

    def main_create(self, repo=None, rc=0, args={}):
        if repo:
            repo_path = os.path.join(self.tempdir.name, repo.split('/')[-1])
            os.mkdir(repo_path)
            Repo.init(repo_path)
        assert rc == main(self.setup_args({
            'create': True,
            '<user>/<repo>': repo,
            '--path': self.tempdir.name
        }, args)), "Non {} result for create".format(rc)
        return RepositoryService._current._did_create

    def main_delete(self, repo=None, rc=0, args={}):
        if repo:
            repo_path = os.path.join(self.tempdir.name, repo.split('/')[-1])
            os.mkdir(repo_path)
            Repo.init(repo_path)
        assert rc == main(self.setup_args({
            'delete': True,
            '<user>/<repo>': repo,
            '--path': self.tempdir.name,
        }, args)), "Non {} result for delete".format(rc)
        return RepositoryService._current._did_delete

    def main_fork(self, repo=None, rc=0, args={}):
        assert rc == main(self.setup_args({
            'fork': True,
            '<user>/<repo>': repo,
            '--path': self.tempdir.name
        }, args)), "Non {} result for fork".format(rc)
        return RepositoryService._current._did_fork

    def main_gist_list(self, rc=0, args={}):
        assert rc == main(self.setup_args({
            'gist': True,
            'list': True,
        }, args)), "Non {} result for gist list".format(rc)
        return RepositoryService._current._did_gist_list

    def main_gist_ls(self, rc=0, args={}):
        assert rc == main(self.setup_args({
            'gist': True,
            'ls': True,
        }, args)), "Non {} result for gist ls".format(rc)
        return RepositoryService._current._did_gist_list

    def main_gist_clone(self, rc=0, args={}):
        assert rc == main(self.setup_args({
            'gist': True,
            'clone': True,
            '--path': self.tempdir.name
        }, args)), "Non {} result for gist clone".format(rc)
        return RepositoryService._current._did_gist_clone

    def main_gist_fetch(self, rc=0, args={}):
        assert rc == main(self.setup_args({
            'gist': True,
            'fetch': True,
        }, args)), "Non {} result for gist fetch".format(rc)
        return RepositoryService._current._did_gist_fetch

    def main_gist_create(self, rc=0, args={}):
        assert rc == main(self.setup_args({
            'gist': True,
            'create': True,
        }, args)), "Non {} result for gist create".format(rc)
        return RepositoryService._current._did_gist_create

    def main_gist_delete(self, rc=0, args={}):
        assert rc == main(self.setup_args({
            'gist': True,
            'delete': True,
        }, args)), "Non {} result for gist delete".format(rc)
        return RepositoryService._current._did_gist_delete

    def main_request_list(self, repo=None, rc=0, args={}):
        assert rc == main(self.setup_args({
            'request': True,
            'list': True,
            '<user>/<repo>': repo,
            '--clone': True,
            '--path': self.tempdir.name
        }, args)), "Non {} result for request list".format(rc)
        return RepositoryService._current._did_request_list

    def main_request_fetch(self, repo=None, rc=0, args={}):
        assert rc == main(self.setup_args({
            'request': True,
            'fetch': True,
            '<user>/<repo>': repo,
            '--clone': True,
            '--path': self.tempdir.name
        }, args)), "Non {} result for request fetch".format(rc)
        return RepositoryService._current._did_request_fetch

    def main_request_create(self, repo=None, rc=0, args={}):
        assert rc == main(self.setup_args({
            'request': True,
            'create': True,
            '<user>/<repo>': repo,
            '--path': self.tempdir.name
        }, args)), "Non {} result for request create".format(rc)
        return RepositoryService._current._did_request_create

    def main_open(self, repo=None, rc=0, args={}):
        assert rc == main(self.setup_args({
            'open': True,
            '<user>/<repo>': repo,
            '--path': self.tempdir.name
        }, args)), "Non {} result for open".format(rc)
        return RepositoryService._current._did_open

    def main_config(self, target, rc=0, args={}):
        assert rc == main(self.setup_args({
            'config': True,
            '--config': os.path.join(self.tempdir.name, 'gitconfig')
        }, args)), "Non {} result for config".format(rc)
        with open(os.path.join(self.tempdir.name, 'gitconfig')) as f:
            return f.readlines()

    def main_noop(self, repo, rc=1, args={}):
        assert rc == main(self.setup_args({
            '<user>/<repo>': repo,
            '--path': self.tempdir.name
        }, args)), "Non {} result for no-action".format(rc)
Example #55
class TestMailStorage(TestCase):
    @classmethod
    def setUpClass(cls) -> None:
        logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
        tracemalloc.start()

    def setUp(self) -> None:
        self.secret = os.urandom(32)
        self.dir = TemporaryDirectory()
        self.home = Path(self.dir.name)
        self.facade = Facade(self.home,
                             self.secret,
                             SetupChurchPortfolio().perform(
                                 ChurchData(**Generate.church_data()[0])),
                             role=Const.A_ROLE_PRIMARY,
                             server=True)

    def tearDown(self) -> None:
        self.facade.close()
        self.dir.cleanup()

    @run_async
    async def test_save(self):
        names = [
            PurePosixPath("/", Generate.filename(".doc")) for _ in range(10)
        ]
        for filename in names:
            self.assertIsInstance(
                await self.facade.storage.mail.save(filename, StubDocument()),
                uuid.UUID)

    @run_async
    async def test_delete(self):
        names = [
            PurePosixPath("/", Generate.filename(".doc")) for _ in range(10)
        ]
        for filename in names:
            await self.facade.storage.mail.save(filename, StubDocument())

        for filename in names:
            await self.facade.storage.mail.delete(filename)
            self.assertTrue(await
                            self.facade.storage.mail.archive.isfile(filename))

    @run_async
    async def test_update(self):
        names = [
            PurePosixPath("/", Generate.filename(".doc")) for _ in range(10)
        ]
        for filename in names:
            await self.facade.storage.mail.save(filename, StubDocument())

        for filename in names:
            self.assertIsInstance(
                await self.facade.storage.mail.update(filename,
                                                      StubDocument()),
                uuid.UUID)

    @run_async
    async def test_issuer(self):
        with self.assertRaises(DeprecationWarning):
            await self.facade.storage.mail.issuer(uuid.uuid4())

    @run_async
    async def test_search(self):
        names = [
            PurePosixPath("/", Generate.filename(".doc")) for _ in range(10)
        ]
        for filename in names:
            await self.facade.storage.mail.save(filename, StubDocument())

        paths = (await self.facade.storage.mail.search()).values()
        for filename in names:
            self.assertIn(filename, paths)
Example #56
    def post():
        parser = reqparse.RequestParser()
        parser.add_argument('common_name',
                            type=str,
                            required=True,
                            help='CommonName must be set')
        parser.add_argument('first_name',
                            type=str,
                            required=True,
                            help='First name must be set')
        parser.add_argument('last_name',
                            type=str,
                            required=True,
                            help='Last name must be set')
        parser.add_argument('alias',
                            type=str,
                            required=True,
                            help='Alias must be set')
        parser.add_argument('email',
                            type=str,
                            required=True,
                            help='Email must be set')
        parser.add_argument('daystoexpire',
                            type=int,
                            required=True,
                            help='Days to expire must be set')
        parser.add_argument('txt',
                            required=False,
                            location='json',
                            action='append',
                            type=txt_loads,
                            help='TXT must be set')
        args = parser.parse_args()

        if nvs_is_exists('ssh:{}'.format(args.common_name)):
            return {
                'result_status': False,
                'message': 'Required records cannot be created'
            }, 400

        cert_expire = args.daystoexpire

        temp_dir_obj = TemporaryDirectory()

        # make info file
        file_content, index, passwd = make_info_data(args)

        # write info file
        writen = False
        name = None
        for i in range(30):
            name = make_random_name()
            file_path = os.path.join(current_app.config.get('CERTS_FOLDER'),
                                     '{0}.zip'.format(name))
            if not os.path.exists(file_path):
                try:
                    with open(os.path.join(temp_dir_obj.name,
                                           '{0}.info'.format(name)),
                              mode='w') as fd:
                        fd.write(file_content)
                    writen = True
                    break
                except OSError:
                    continue

        if not writen:
            return {
                'result_status': False,
                'message': 'can\'t gen file name'
            }, 400

        # make and write ze file
        ze_data = encrypt(
            gzip.compress(
                '\n'.join([
                    line for line in file_content.split('\n')
                    if len(line) == 0 or line[0] != '#'
                ]).encode(encoding='utf-8'), 9),
            passwd.encode(encoding='utf-8'))

        if (nvs_is_exists('ssl:{}'.format(name))
                or nvs_is_exists('info:{}'.format(index))
                or not (nvs_is_valid('ssl:{}'.format(name))
                        and nvs_is_valid('info:{}'.format(index)))):
            return {
                'result_status': False,
                'message': 'Required records cannot be created'
            }, 400

        try:
            with open(os.path.join(temp_dir_obj.name, '{0}.ze'.format(name)),
                      mode='wb') as fd:
                fd.write(ze_data)
        except OSError:
            return {'result_status': False, 'message': 'can\'t save file'}, 400

        try:
            if not os.path.exists(current_app.config.get('CA_CERTIFICATE', '')) \
                    or not os.path.exists(current_app.config.get('CA_PRIVATE_KEY', '')):
                return {
                    'result_status': False,
                    'message': 'CA haven\'t found'
                }, 400
        except OSError:
            return {
                'result_status': False,
                'message': 'can\'t open files'
            }, 400

        pkey, crt, p12, fingerprint = make_certificate(
            tmp_name=name,
            ca_path=current_app.config.get('CA_CERTIFICATE'),
            ca_priv_key_path=current_app.config.get('CA_PRIVATE_KEY'),
            cn=args.common_name,
            email=args.email,
            uid='info:{0}:{1}'.format(index, passwd),
            days_to_exp=cert_expire)

        code = fingerprint.replace(b':', b'').lower().decode()

        ovpn_content = get_ovpn_content(pkey, crt)

        try:
            with open(os.path.join(temp_dir_obj.name, '{0}.key'.format(name)),
                      mode='wb') as fd:
                fd.write(pkey)
            with open(os.path.join(temp_dir_obj.name, '{0}.crt'.format(name)),
                      mode='wb') as fd:
                fd.write(crt)
            with open(os.path.join(temp_dir_obj.name, '{0}.p12'.format(name)),
                      mode='wb') as fd:
                fd.write(p12)
            with open(os.path.join(temp_dir_obj.name, '{0}.ovpn'.format(name)),
                      mode='w') as fd:
                fd.write(ovpn_content)
        except OSError:
            return {
                'result_status': False,
                'message': 'can\'t save files'
            }, 400

        # Make zip
        try:
            with zipfile.ZipFile(os.path.join(temp_dir_obj.name,
                                              '{0}.zip'.format(name)),
                                 mode='w',
                                 compression=zipfile.ZIP_DEFLATED) as zf:
                for ext in ('key', 'crt', 'p12', 'info', 'ze', 'ovpn'):
                    zf.write(
                        os.path.join(temp_dir_obj.name,
                                     '{0}.{1}'.format(name, ext)),
                        '{0}.{1}'.format(name, ext))
        except OSError:
            return {
                'result_status': False,
                'message': 'can\'t create zip file'
            }, 400

        try:
            shutil.move(
                os.path.join(temp_dir_obj.name, '{0}.zip'.format(name)),
                os.path.join(current_app.config.get('CERTS_FOLDER'),
                             '{0}.zip'.format(name)))
        except OSError:
            return {
                'result_status': False,
                'message': 'can\'t move zip file'
            }, 400

        try:
            shutil.move(
                os.path.join(temp_dir_obj.name, '{0}.p12'.format(name)),
                os.path.join(current_app.config.get('CERTS_FOLDER'),
                             '{0}.p12'.format(name)))
        except OSError:
            return {
                'result_status': False,
                'message': 'can\'t move p12 file'
            }, 400

        temp_dir_obj.cleanup()

        created, error = update_or_create_nvs(
            'ssh:{}'.format(args.common_name), name, cert_expire + 365)
        if error:
            return {'result_status': False, 'message': format(error)}, 400

        created, error = update_or_create_nvs('ssl:{}'.format(name),
                                              'sha256={}'.format(code),
                                              cert_expire + 365)
        if error:
            return {'result_status': False, 'message': format(error)}, 400

        ze_data_base64 = base64.b64encode(ze_data).decode('utf-8')

        created, error = update_or_create_nvs('info:{}'.format(index),
                                              ze_data_base64,
                                              cert_expire + 365, '', 'base64')
        if error:
            return {'result_status': False, 'message': format(error)}, 400

        return {'result_status': True, 'result': {'name': name, 'value': code}}
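
The file-staging pattern post() follows, reduced to a hedged standard-library sketch: write the artifacts into a TemporaryDirectory, zip them there, move the finished archive into the destination folder, then clean up. Names and paths are illustrative, not the endpoint's real configuration.

import os
import shutil
import zipfile
from tempfile import TemporaryDirectory


def build_bundle(artifacts, dest_dir, name="bundle"):
    """artifacts: mapping of file name -> bytes; returns the final zip path."""
    tmp = TemporaryDirectory()
    try:
        # Stage every artifact inside the temporary directory.
        for fname, payload in artifacts.items():
            with open(os.path.join(tmp.name, fname), "wb") as fd:
                fd.write(payload)
        # Zip them in place, then move the archive to its final home.
        zip_path = os.path.join(tmp.name, "%s.zip" % name)
        with zipfile.ZipFile(zip_path, mode="w",
                             compression=zipfile.ZIP_DEFLATED) as zf:
            for fname in artifacts:
                zf.write(os.path.join(tmp.name, fname), fname)
        final_path = os.path.join(dest_dir, "%s.zip" % name)
        shutil.move(zip_path, final_path)
        return final_path
    finally:
        tmp.cleanup()


# Usage sketch:
# build_bundle({"a.txt": b"hello"}, dest_dir="/tmp", name="demo")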
Example #57
class LocalExecutor(ExperimentExecutor):
    """Local machine experiment executor."""
    def __init__(
        self,
        baseline_rev: str,
        checkpoint_reset: Optional[bool] = False,
        **kwargs,
    ):
        from dvc.repo import Repo

        dvc_dir = kwargs.pop("dvc_dir")
        cache_dir = kwargs.pop("cache_dir")
        super().__init__(baseline_rev, **kwargs)
        self.tmp_dir = TemporaryDirectory()

        # init empty DVC repo (will be overwritten when input is uploaded)
        Repo.init(root_dir=self.tmp_dir.name, no_scm=True)
        logger.debug(
            "Init local executor in dir '%s' with baseline '%s'.",
            self.tmp_dir,
            baseline_rev[:7],
        )
        self.dvc_dir = os.path.join(self.tmp_dir.name, dvc_dir)
        self._config(cache_dir)
        self._tree = LocalTree(self.dvc, {"url": self.dvc.root_dir})
        # override default CACHE_MODE since files must be writable in order
        # to run repro
        self._tree.CACHE_MODE = 0o644
        self.checkpoint_reset = checkpoint_reset

    def _config(self, cache_dir):
        local_config = os.path.join(self.dvc_dir, "config.local")
        logger.debug("Writing experiments local config '%s'", local_config)
        with open(local_config, "w") as fobj:
            fobj.write("[core]\n    no_scm = true\n")
            fobj.write(f"[cache]\n    dir = {cache_dir}")

    @cached_property
    def dvc(self):
        from dvc.repo import Repo

        return Repo(self.dvc_dir)

    @cached_property
    def path_info(self):
        return PathInfo(self.tmp_dir.name)

    @property
    def tree(self):
        return self._tree

    @staticmethod
    def reproduce(dvc_dir, cwd=None, **kwargs):
        """Run dvc repro and return the result."""
        from dvc.repo import Repo
        from dvc.repo.experiments import hash_exp

        unchanged = []

        def filter_pipeline(stages):
            unchanged.extend([
                stage for stage in stages if isinstance(stage, PipelineStage)
            ])

        if cwd:
            old_cwd = os.getcwd()
            os.chdir(cwd)
        else:
            old_cwd = None
            cwd = os.getcwd()

        try:
            logger.debug("Running repro in '%s'", cwd)
            dvc = Repo(dvc_dir)

            # NOTE: for checkpoint experiments we handle persist outs slightly
            # differently than normal:
            #
            # - checkpoint out may not yet exist if this is the first time this
            #   experiment has been run, this is not an error condition for
            #   experiments
            # - at the start of a repro run, we need to remove the persist out
            #   and restore it to its last known (committed) state (which may
            #   be removed/does not yet exist) so that our executor workspace
            #   is not polluted with the (persistent) out from an unrelated
            #   experiment run
            checkpoint = kwargs.pop("checkpoint", False)
            dvc.checkout(allow_missing=checkpoint,
                         force=checkpoint,
                         quiet=checkpoint)
            stages = dvc.reproduce(
                on_unchanged=filter_pipeline,
                allow_missing=checkpoint,
                **kwargs,
            )
        finally:
            if old_cwd is not None:
                os.chdir(old_cwd)

        # ideally we would return stages here like a normal repro() call, but
        # stages is not currently picklable and cannot be returned across
        # multiprocessing calls
        return hash_exp(stages + unchanged)

    def collect_output(self) -> Iterable["PathInfo"]:
        repo_tree = RepoTree(self.dvc)
        yield from self.collect_files(self.tree, repo_tree)

    @staticmethod
    def collect_files(tree: BaseTree, repo_tree: RepoTree):
        for fname in repo_tree.walk_files(repo_tree.root_dir, dvcfiles=True):
            if not repo_tree.isdvc(fname):
                yield tree.path_info / fname.relative_to(repo_tree.root_dir)

    def cleanup(self):
        logger.debug("Removing tmpdir '%s'", self.tmp_dir)
        self.tmp_dir.cleanup()
        super().cleanup()
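
The cwd save/restore in reproduce() above can be expressed as a small context manager; this is just a generic sketch of that pattern, not part of DVC's API.

import os
from contextlib import contextmanager


@contextmanager
def working_directory(path=None):
    """Optionally chdir into path; always restore the caller's cwd."""
    old_cwd = os.getcwd()
    if path:
        os.chdir(path)
    try:
        yield os.getcwd()
    finally:
        os.chdir(old_cwd)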
Example #58
class TestSQLiteTLE(unittest.TestCase):
    """Test saving TLE data to a SQLite database."""

    def setUp(self):
        """Create a database instance."""
        from pyorbital.tlefile import SQLiteTLE
        from pyorbital.tlefile import Tle
        from tempfile import TemporaryDirectory

        self.temp_dir = TemporaryDirectory()
        self.db_fname = os.path.join(self.temp_dir.name, 'tle.db')
        self.platforms = {25544: "ISS"}
        self.writer_config = {
            "output_dir": os.path.join(self.temp_dir.name, 'tle_dir'),
            "filename_pattern": "tle_%Y%m%d_%H%M%S.%f.txt",
            "write_name": True,
            "write_always": False
        }
        self.db = SQLiteTLE(self.db_fname, self.platforms, self.writer_config)
        self.tle = Tle('ISS', line1=line1, line2=line2)

    def tearDown(self):
        """Clean temporary files."""
        self.temp_dir.cleanup()

    def test_init(self):
        """Test that the init did what it should have."""
        from pyorbital.tlefile import table_exists, PLATFORM_NAMES_TABLE

        columns = [col.strip() for col in
                   PLATFORM_NAMES_TABLE.strip('()').split(',')]
        num_columns = len(columns)

        self.assertTrue(os.path.exists(self.db_fname))
        self.assertTrue(table_exists(self.db.db, "platform_names"))
        res = self.db.db.execute('select * from platform_names')
        names = [description[0] for description in res.description]
        self.assertEqual(len(names), num_columns)
        for col in columns:
            self.assertTrue(col.split(' ')[0] in names)

    def test_update_db(self):
        """Test updating database with new data."""
        from pyorbital.tlefile import (table_exists, SATID_TABLE,
                                       ISO_TIME_FORMAT)

        # Get the column names
        columns = [col.strip() for col in
                   SATID_TABLE.replace("'{}' (", "").strip(')').split(',')]
        # Platform number
        satid = str(list(self.platforms.keys())[0])

        # Data from a platform that isn't configured
        self.db.platforms = {}
        self.db.update_db(self.tle, 'foo')
        self.assertFalse(table_exists(self.db.db, satid))
        self.assertFalse(self.db.updated)

        # Configured platform
        self.db.platforms = self.platforms
        self.db.update_db(self.tle, 'foo')
        self.assertTrue(table_exists(self.db.db, satid))
        self.assertTrue(self.db.updated)

        # Check that all the columns were added
        res = self.db.db.execute("select * from '%s'" % satid)
        names = [description[0] for description in res.description]
        for col in columns:
            self.assertTrue(col.split(' ')[0] in names)

        # Check the data
        data = res.fetchall()
        self.assertEqual(len(data), 1)
        # epoch
        self.assertEqual(data[0][0], '2008-09-20T12:25:40.104192')
        # TLE
        self.assertEqual(data[0][1], '\n'.join((line1, line2)))
        # Date when the data were added should be close to current time
        date_added = datetime.datetime.strptime(data[0][2], ISO_TIME_FORMAT)
        now = datetime.datetime.utcnow()
        self.assertTrue((now - date_added).total_seconds() < 1.0)
        # Source of the data
        self.assertTrue(data[0][3] == 'foo')

        # Try to add the same data again. Nothing should change even
        # if the source is different if the epoch is the same
        self.db.update_db(self.tle, 'bar')
        res = self.db.db.execute("select * from '%s'" % satid)
        data = res.fetchall()
        self.assertEqual(len(data), 1)
        date_added2 = datetime.datetime.strptime(data[0][2], ISO_TIME_FORMAT)
        self.assertEqual(date_added, date_added2)
        # Source of the data
        self.assertTrue(data[0][3] == 'foo')

    def test_write_tle_txt(self):
        """Test reading data from the database and writing it to a file."""
        import glob
        tle_dir = self.writer_config["output_dir"]

        # Put some data in the database
        self.db.update_db(self.tle, 'foo')

        # Fake that the database hasn't been updated
        self.db.updated = False

        # Try to dump the data to disk
        self.db.write_tle_txt()

        # The output dir hasn't been created
        self.assertFalse(os.path.exists(tle_dir))

        self.db.updated = True
        self.db.write_tle_txt()

        # The dir should be there
        self.assertTrue(os.path.exists(tle_dir))
        # There should be one file in the directory
        files = glob.glob(os.path.join(tle_dir, 'tle_*txt'))
        self.assertEqual(len(files), 1)
        # The file should have been named with the date ('%' characters
        # not there anymore)
        self.assertTrue('%' not in files[0])
        # The satellite name should be in the file
        with open(files[0], 'r') as fid:
            data = fid.read().split('\n')
        self.assertEqual(len(data), 3)
        self.assertTrue('ISS' in data[0])
        self.assertEqual(data[1], line1)
        self.assertEqual(data[2], line2)

        # Call the writing again, nothing should be written. In
        # real-life this assumes a re-run has been done without new
        # TLE data
        self.db.updated = False
        self.db.write_tle_txt()
        files = glob.glob(os.path.join(tle_dir, 'tle_*txt'))
        self.assertEqual(len(files), 1)

        # Force writing with every call
        # Do not write the satellite name
        self.db.writer_config["write_always"] = True
        self.db.writer_config["write_name"] = False
        self.db.write_tle_txt()
        files = sorted(glob.glob(os.path.join(tle_dir, 'tle_*txt')))
        self.assertEqual(len(files), 2)
        with open(files[1], 'r') as fid:
            data = fid.read().split('\n')
        self.assertEqual(len(data), 2)
        self.assertEqual(data[0], line1)
        self.assertEqual(data[1], line2)
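For reference, a minimal sketch (not pyorbital's actual implementation) of the three-line file layout that test_write_tle_txt above checks for; the output directory, filename pattern and platform name in the usage note are hypothetical.

# Sketch only: reproduces the tle_*.txt layout the test asserts on
# (platform name, then the two TLE lines), with the filename taken from a
# date pattern so no '%' characters remain in the final name.
import datetime
import os


def write_tle_txt_sketch(output_dir, filename_pattern, platform_name,
                         line1, line2, write_name=True):
    os.makedirs(output_dir, exist_ok=True)
    fname = datetime.datetime.utcnow().strftime(filename_pattern)
    rows = [platform_name, line1, line2] if write_name else [line1, line2]
    path = os.path.join(output_dir, fname)
    with open(path, 'w') as fid:
        fid.write('\n'.join(rows))
    return path


# Usage with hypothetical values:
# write_tle_txt_sketch('/tmp/tle', 'tle_%Y%m%d_%H%M%S.txt', 'ISS (ZARYA)',
#                      line1, line2)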
Example #59
0
class S3TFRecordWriter(object):
    def __init__(self, fn):
        self.fn = fn
        if fn.startswith('s3://'):
            from boto3.s3.transfer import TransferConfig
            import boto3
            self.gclient = None
            self.s3client = boto3.client('s3')
            self.storage_dir = TemporaryDirectory()
            self.writer = tf.python_io.TFRecordWriter(
                os.path.join(self.storage_dir.name, 'temp.tfrecord'))
            self.bucket_name, self.file_name = \
                self.fn.split('s3://', 1)[1].split('/', 1)
        elif fn.startswith('gs://'):
            from google.cloud import storage
            self.s3client = None
            self.gclient = storage.Client()
            self.storage_dir = TemporaryDirectory()
            self.writer = tf.python_io.TFRecordWriter(
                os.path.join(self.storage_dir.name, 'temp.tfrecord'))
            self.bucket_name, self.file_name = \
                self.fn.split('gs://', 1)[1].split('/', 1)
        else:
            self.s3client = None
            self.gclient = None
            self.bucket_name = None
            self.file_name = None
            self.storage_dir = None
            self.writer = tf.python_io.TFRecordWriter(fn)

    def write(self, x):
        self.writer.write(x)

    def close(self):
        self.writer.close()

        if self.s3client is not None:
            from boto3.s3.transfer import TransferConfig
            config = TransferConfig(multipart_threshold=1024 * 25,
                                    max_concurrency=10,
                                    multipart_chunksize=1024 * 25,
                                    use_threads=True)
            self.s3client.upload_file(
                os.path.join(self.storage_dir.name, 'temp.tfrecord'),
                self.bucket_name,
                self.file_name,
                ExtraArgs={'ACL': 'public-read'},
                Config=config,
            )
            self.storage_dir.cleanup()
        if self.gclient is not None:
            bucket = self.gclient.get_bucket(self.bucket_name)
            blob = bucket.blob(self.file_name)
            blob.upload_from_filename(
                os.path.join(self.storage_dir.name, 'temp.tfrecord'))
            self.storage_dir.cleanup()

    def __enter__(self):
        # Called when entering "with" context.
        return self

    def __exit__(self, *_):
        # Called when exiting "with" context.
        # Upload the locally buffered file to remote storage, if any.
        print("CALLING CLOSE")
        self.close()
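A short usage sketch for the S3TFRecordWriter defined above; the bucket path and the feature payload are hypothetical, and the upload on close assumes valid AWS credentials.

# Usage sketch: records are buffered to a local temp file and uploaded to
# the remote bucket when the "with" block exits (hypothetical path/values).
import tensorflow as tf

example = tf.train.Example(features=tf.train.Features(feature={
    'value': tf.train.Feature(int64_list=tf.train.Int64List(value=[42])),
}))

with S3TFRecordWriter('s3://my-bucket/records/train-0001.tfrecord') as writer:
    writer.write(example.SerializeToString())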
Example #60
0
class TestAllowListProject(TestCase):
    """
    Tests for the bandersnatch filtering classes
    """
    def setUp(self) -> None:
        self.cwd = os.getcwd()
        self.tempdir = TemporaryDirectory()
        bandersnatch.storage.loaded_storage_plugins = defaultdict(list)
        os.chdir(self.tempdir.name)

    def tearDown(self) -> None:
        if self.tempdir:
            assert self.cwd
            os.chdir(self.cwd)
            self.tempdir.cleanup()

    def test__plugin__loads__explicitly_enabled(self) -> None:
        mock_config(contents="""\
[mirror]
storage-backend = filesystem
workers = 2

[plugins]
enabled =
    allowlist_project
""")

        plugins = bandersnatch.filter.LoadedFilters().filter_project_plugins()
        names = [plugin.name for plugin in plugins]
        self.assertListEqual(names, ["allowlist_project"])
        self.assertEqual(len(plugins), 1)

    def test__plugin__loads__default(self) -> None:
        mock_config("""\
[mirror]
storage-backend = filesystem
workers = 2

[plugins]
""")

        plugins = bandersnatch.filter.LoadedFilters().filter_project_plugins()
        names = [plugin.name for plugin in plugins]
        self.assertNotIn("allowlist_project", names)

    def test__filter__matches__package(self) -> None:
        mock_config("""\
[mirror]
storage-backend = filesystem
workers = 2

[plugins]
enabled =
    allowlist_project

[allowlist]
packages =
    foo
""")

        mirror = BandersnatchMirror(Path("."),
                                    Master(url="https://foo.bar.com"))
        mirror.packages_to_sync = {"foo": ""}
        mirror._filter_packages()

        self.assertIn("foo", mirror.packages_to_sync.keys())

    def test__filter__nomatch_package(self) -> None:
        mock_config("""\
[mirror]
storage-backend = filesystem
workers = 2

[plugins]
enabled =
    allowlist_project

[allowlist]
packages =
    foo
""")

        mirror = BandersnatchMirror(Path("."),
                                    Master(url="https://foo.bar.com"))
        mirror.packages_to_sync = {"foo": "", "foo2": ""}
        mirror._filter_packages()

        self.assertIn("foo", mirror.packages_to_sync.keys())
        self.assertNotIn("foo2", mirror.packages_to_sync.keys())

    def test__filter__name_only(self) -> None:
        mock_config("""\
[mirror]
storage-backend = filesystem
workers = 2

[plugins]
enabled =
    allowlist_project

[allowlist]
packages =
    foo==1.2.3
""")

        mirror = BandersnatchMirror(Path("."),
                                    Master(url="https://foo.bar.com"))
        mirror.packages_to_sync = {"foo": "", "foo2": ""}
        mirror._filter_packages()

        self.assertIn("foo", mirror.packages_to_sync.keys())
        self.assertNotIn("foo2", mirror.packages_to_sync.keys())

    def test__filter__varying__specifiers(self) -> None:
        mock_config("""\
[mirror]
storage-backend = filesystem
workers = 2

[plugins]
enabled =
    allowlist_project

[allowlist]
packages =
    foo==1.2.3
    bar~=3.0,<=1.5
""")
        mirror = BandersnatchMirror(Path("."),
                                    Master(url="https://foo.bar.com"))
        mirror.packages_to_sync = {
            "foo": "",
            "bar": "",
            "snu": "",
        }
        mirror._filter_packages()

        self.assertEqual({"foo": "", "bar": ""}, mirror.packages_to_sync)

    def test__filter__commented__out(self) -> None:
        mock_config("""\
[mirror]
storage-backend = filesystem
workers = 2

[plugins]
enabled =
    allowlist_project

[allowlist]
packages =
    foo==1.2.3   # inline comment
#    bar
""")
        mirror = BandersnatchMirror(Path("."),
                                    Master(url="https://foo.bar.com"))
        mirror.packages_to_sync = {
            "foo": "",
            "bar": "",
            "snu": "",
        }
        mirror._filter_packages()

        self.assertEqual({"foo": ""}, mirror.packages_to_sync)