async def test_get_file_from_previous_node(
    special_2nodes_configuration,
    project_id,
    node_uuid,
    filemanager_cfg,
    node_link,
    store_link,
    item_type,
    item_value,
    item_pytype,
):
    config_dict, _, _ = special_2nodes_configuration(
        prev_node_outputs=[("output_123", item_type,
                            store_link(item_value, project_id, node_uuid))],
        inputs=[("in_15", item_type, node_link("output_123"))],
        project_id=project_id,
        previous_node_id=node_uuid,
    )
    PORTS = await node_ports.ports()
    await check_config_valid(PORTS, config_dict)
    file_path = await (await PORTS.inputs)["in_15"].get()
    assert isinstance(file_path, item_pytype)
    assert file_path == Path(tempfile.gettempdir(), "simcorefiles", "in_15",
                             Path(item_value).name)
    assert file_path.exists()
    filecmp.clear_cache()
    assert filecmp.cmp(file_path, item_value)
Example #2
async def test_port_file_accessors(
    special_configuration: Callable,
    filemanager_cfg: None,
    s3_simcore_location: str,
    bucket: str,
    item_type: str,
    item_value: str,
    item_pytype: Type,
    config_value: Dict[str, str],
    user_id: int,
    project_id: str,
    node_uuid: str,
    e_tag: str,
):  # pylint: disable=W0613, W0621

    config_value["path"] = f"{project_id}/{node_uuid}/{Path(config_value['path']).name}"

    config_dict, _project_id, _node_uuid = special_configuration(
        inputs=[("in_1", item_type, config_value)],
        outputs=[("out_34", item_type, None)],
    )

    assert _project_id == project_id
    assert _node_uuid == node_uuid

    PORTS = await node_ports_v2.ports(user_id=user_id,
                                      project_id=project_id,
                                      node_uuid=node_uuid)
    await check_config_valid(PORTS, config_dict)
    assert await (await PORTS.outputs)["out_34"].get() is None  # check emptiness
    with pytest.raises(exceptions.StorageInvalidCall):
        await (await PORTS.inputs)["in_1"].get()

    # this triggers an upload to S3 + configuration change
    await (await PORTS.outputs)["out_34"].set(item_value)
    # this is the link to S3 storage
    received_file_link = (await PORTS.outputs)["out_34"].value.dict(
        by_alias=True, exclude_unset=True)
    assert received_file_link["store"] == s3_simcore_location
    assert (received_file_link["path"] == Path(
        str(project_id), str(node_uuid),
        Path(item_value).name).as_posix())
    # the eTag is created by the S3 server
    assert received_file_link["eTag"]

    # this triggers a download from S3 to a location in /tempdir/simcorefiles/item_key
    assert isinstance(await (await PORTS.outputs)["out_34"].get(), item_pytype)
    assert (await (await PORTS.outputs)["out_34"].get()).exists()
    assert str(await (await PORTS.outputs)["out_34"].get()).startswith(
        str(
            Path(
                tempfile.gettempdir(),
                "simcorefiles",
                f"{threading.get_ident()}",
                "out_34",
            )))
    filecmp.clear_cache()
    assert filecmp.cmp(item_value, await (await PORTS.outputs)["out_34"].get())
Example #3
async def test_port_file_accessors(special_configuration, storage,
                                   filemanager_cfg, s3_simcore_location,
                                   bucket, item_type, item_value, item_pytype,
                                   config_value):  # pylint: disable=W0613, W0621
    config_dict, project_id, node_uuid = special_configuration(
        inputs=[("in_1", item_type, config_value)],
        outputs=[("out_34", item_type, None)])
    PORTS = node_ports.ports()
    check_config_valid(PORTS, config_dict)
    assert await PORTS.outputs["out_34"].get() is None  # check emptiness
    # with pytest.raises(exceptions.S3InvalidPathError, message="Expecting S3InvalidPathError"):
    #     await PORTS.inputs["in_1"].get()

    # this triggers an upload to S3 + configuration change
    await PORTS.outputs["out_34"].set(item_value)
    # this is the link to S3 storage
    assert PORTS.outputs["out_34"].value == {
        "store": s3_simcore_location,
        "path": Path(str(project_id), str(node_uuid),
                     Path(item_value).name).as_posix(),
    }
    # this triggers a download from S3 to a location in /tempdir/simcorefiles/item_key
    assert isinstance(await PORTS.outputs["out_34"].get(), item_pytype)
    assert (await PORTS.outputs["out_34"].get()).exists()
    assert str(await PORTS.outputs["out_34"].get()).startswith(
        str(Path(tempfile.gettempdir(), "simcorefiles", "out_34")))
    filecmp.clear_cache()
    assert filecmp.cmp(item_value, await PORTS.outputs["out_34"].get())
Example #4
def directories_are_same(left_directory, right_directory):
    """Check recursively whether two directories contain the same files.
    Based on https://stackoverflow.com/a/6681395

    Keyword arguments:
    left_directory -- one of the two directories to compare
    right_directory -- the other directory to compare
    """
    filecmp.clear_cache()
    directory_comparison = filecmp.dircmp(a=left_directory, b=right_directory)
    if (len(directory_comparison.left_only) > 0
            or len(directory_comparison.right_only) > 0
            or len(directory_comparison.funny_files) > 0):
        return False

    filecmp.clear_cache()
    (_, mismatch,
     errors) = filecmp.cmpfiles(a=left_directory,
                                b=right_directory,
                                common=directory_comparison.common_files,
                                shallow=False)
    if len(mismatch) > 0 or len(errors) > 0:
        return False

    for common_dir in directory_comparison.common_dirs:
        if not directories_are_same(
                left_directory=left_directory.joinpath(common_dir),
                right_directory=right_directory.joinpath(common_dir)):
            return False

    return True
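A minimal usage sketch for the helper above; the directory names are hypothetical, and both arguments must be pathlib.Path objects because the recursion calls .joinpath() on them:

from pathlib import Path

left = Path("/tmp/build_a")    # hypothetical directories to compare
right = Path("/tmp/build_b")
if directories_are_same(left, right):
    print("directory trees match")
else:
    print("directory trees differ")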
Example #5
 def test_cache_clear(self):
     first_compare = filecmp.cmp(self.name, self.name_same, shallow=False)
     second_compare = filecmp.cmp(self.name, self.name_diff, shallow=False)
     filecmp.clear_cache()
     self.assertTrue(
         len(filecmp._cache) == 0,
         "Cache not cleared after calling clear_cache")
Example #6
def is_same(path1, path2, verbose=False):
    """
    Compare the content of the two directory trees.
    :param path1: Left path to compare from
    :param path2: Right path to compare with
    :returns: True if they are the same, False if they differ
    """
    # Clear the file structure cache
    filecmp.clear_cache()
    compared = dircmp(path1, path2)

    if (compared.left_only or compared.right_only or compared.diff_files
            or compared.funny_files):
        if verbose:
            # Display a summary report of the differences
            compared.report_partial_closure()
        else:
            print('Files that differ: {}\n'.format(compared.diff_files))
            print('Files only in {}: {}\n'.format(path1, compared.left_only))
            print('Files only in {}: {}\n'.format(path2, compared.right_only))
        return False
    for subdir in compared.common_dirs:
        if not is_same(os.path.join(path1, subdir),
                       os.path.join(path2, subdir)):
            return False
    return True
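A usage sketch with hypothetical paths; unlike directories_are_same above, this helper takes plain string paths and joins subdirectories with os.path.join():

# hypothetical deployment directories
if is_same("/srv/deploy/current", "/srv/deploy/staged", verbose=True):
    print("trees are identical")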
Example #7
def compare_files(test, expected):
    with open(expected) as f:
        lines_expected = f.readlines()
    with open(test) as f:
        lines_test = f.readlines()

    print(''.join(difflib.ndiff(lines_test, lines_expected)))
    filecmp.clear_cache()
    assert filecmp.cmp(test, expected) is True
Example #8
    def test_file_operations(self):
        """Check the server's ability to receive and return files."""
        with open(self.sample_file, 'rb') as file:
            response = requests.post(self.server_url, files={'name': file})
        self.assertEqual(response.status_code, 201)
        uuid = response.text

        # Uploaded file is the same as the local one
        _, extension = path.splitext(self.sample_file)
        filecmp.clear_cache()
        self.assertTrue(
            filecmp.cmp(self.sample_file,
                        path.join(FILEDIR, f'{uuid}{extension}')))

        # Server responds with the proper filename
        check_response = urlopen(self.server_url + '?id=' + uuid)
        self.assertEqual(check_response.read().decode(),
                         path.basename(self.sample_file))

        # Downloaded file is the same as the previously uploaded
        download, _ = urlretrieve(
            self.server_url + '?id=' + uuid + '&download=1',
            path.join(BASE_DIR, 'download'))
        self.assertTrue(filecmp.cmp(download, self.sample_file))

        remove(download)
        remove(path.join(FILEDIR, f'{uuid}{extension}'))

        with sqlite3.connect(DATABASE) as conn:
            query = '''DELETE FROM filepaths
                       WHERE uuid = :uuid
                    '''
            conn.execute(query, {'uuid': uuid})
        conn.close()
Example #9
        def confronta_file():

            # "INPUT1 VUOTO" / "INPUT2 VUOTO" are sentinel strings set
            # elsewhere when a file input is empty ("vuoto" = empty);
            # they are kept as-is because they are compared against here.
            if self.filename1_confronta == "INPUT1 VUOTO":
                KOFile1InputMessage()
            elif self.filename2_confronta == "INPUT2 VUOTO":
                KOFile2InputMessage()
            else:
                compare_file = filecmp.cmp(self.filename1_confronta,
                                           self.filename2_confronta,
                                           shallow=True)
                if compare_file:
                    messaggio = ("FILE 1 : " + str(self.filename1_confronta) +
                                 "\nFILE 2 : " + str(self.filename2_confronta) +
                                 "\n\nThe contents of the two files are identical.")
                    msg = messagebox.showinfo("FILE COMPARISON OK",
                                              str(messaggio))
                else:
                    messaggio = ("FILE 1 : " + str(self.filename1_confronta) +
                                 "\nFILE 2 : " + str(self.filename2_confronta) +
                                 "\n\nThe contents of the two files are DIFFERENT!!!")
                    msg = messagebox.showerror("FILE COMPARISON ERROR",
                                               str(messaggio))
            filecmp.clear_cache()
Example #10
    def test_convert_dwg2svg(self, dwg_input, dwg_path, dxf_path):
        for dwg in dwg_path:
            dxf = "./assets/dxf/" + os.path.basename(dwg)[:-4] + ".dxf"
            dwg_input.dwg2dxf_converter(dwg, dxf)

            clear_cache()
            assert cmp(dxf,
                       './test/assets/simple/' + os.path.basename(dwg)[:-4] +
                       ".dxf",
                       shallow=True)
Example #11
    def __init__(self, test_suffixes, tests_directory,
                 updated_outputs_directory):
        self.test_suffixes = test_suffixes
        self.tests_directory = Path(tests_directory)
        self.updated_outputs_directory = Path(updated_outputs_directory)
        self.missing_or_empty_sources = []
        self.deleted_files = []
        self.updated_files = []

        # invalidate the file comparison cache
        clear_cache()
Example #12
def validate_rdg_storage_format_subset(subset_path, superset_path, verbose=False):
    """
    Uses the RDG at subset_path to validate the structure of the RDG at superset_path.
    Ensures that all files in the subset are available in the superset and are identical.
    This does not prove the two RDGs are identical, only that the RDG at superset_path is
    at least a superset of the RDG at subset_path.
    """
    manifest_files = []
    part_header_files = []
    compare_paths = []

    file_paths = [x for x in subset_path.glob("**/*") if x.is_file()]

    # sort out the json files
    for path in file_paths:
        if manifest_filename_substring in path.name:
            manifest_files.append(path)
        elif part_header_filename_substring in path.name:
            part_header_files.append(path)
        else:
            # all json files that we do not recognize will end up getting compared as normal files
            compare_paths.append(path)

    # compare all parquet and data files
    filecmp.clear_cache()
    for subset_file in compare_paths:
        subset_no_rand = remove_rand_string(subset_file.name)
        found = False
        for superset_file in superset_path.glob(f"{subset_no_rand}*"):
            if filecmp.cmp(subset_file, superset_file, shallow=False):
                if verbose:
                    print(f"{subset_file} in {subset_path} is equal to {superset_file} in {superset_path}")
                found = True

        if not found:
            print(f"Failed to find matching file for {subset_no_rand}-* in {superset_path}")
            return False

    # compare the json files
    for manifest in manifest_files:
        if verbose:
            print(f"checking json manifest files {manifest}, {superset_path / manifest.name}")
        if not json_match(manifest, superset_path / manifest.name, verbose):
            print(f"json manifest file {manifest} does not match {superset_path / manifest.name}")
            return False

    for part_header in part_header_files:
        if verbose:
            print(f"checking json part_header files {part_header}, {superset_path / part_header.name}")
        if not json_match(part_header, superset_path / part_header.name, verbose):
            print(f"json part_header file {part_header} does not match {superset_path / part_header.name}")
            return False

    return True
Example #13
 def files_compare(self):
     '''Compare the differences between the two files.'''
     file1 = r'/root/python3/tt.txt'
     file2 = r'/root/python3/{}'.format(self.save_file())
     filecmp.clear_cache()
     if not filecmp.cmp(file1, file2):
         text1_lines = self.readfile(file1)
         text2_lines = self.readfile(file2)
         d = difflib.HtmlDiff()
         compare_result = d.make_file(text1_lines, text2_lines)
         return compare_result
Example #14
 def test_save_text(self):
     this = os.path.dirname(__file__)
     onnx_file = os.path.join(this, "models", "coreml_OneHotEncoder_BikeSharing.onnx")
     onnx_model = load_model(onnx_file)
     json_file = os.path.join(this, "models", "coreml_OneHotEncoder_BikeSharing.json")
     json_file_new = os.path.join(this, "models", "coreml_OneHotEncoder_BikeSharing_new.json")
     save_text(onnx_model, json_file_new)
     filecmp.clear_cache()
     content1 = self._parseEOL(json_file)
     content2 = self._parseEOL(json_file_new)
     self.assertTrue(content1 == content2,
                     "Output file from save_text is different than reference output.")
Example #15
def restorefile(target, source):
    """ Restores a file at target to backup source and unlinks the backup source if the restoration is successful """
    if not source.exists():
        raise OSError("Backup not accessible for restoration!")
    if target.exists():
        target.unlink()
    shutil.copy2(str(source), str(target))
    filecmp.clear_cache()
    if not filecmp.cmp(str(source), str(target), shallow=False):
        raise OSError(
            f"File backup restoration not identical to original; you may need to restore it yourself:\nTarget Location: {target}\nBackup File: {source}"
        )
    source.unlink()
Example #16
    def treeTest(self, keepResultFile=False) -> None:
        tree_result_file_name = "tree_result.txt"
        original_stdout = sys.stdout

        with open(tree_result_file_name, "w") as f:
            sys.stdout = f

            tree = BinaryTree()
            print(tree.isEmpty())
            tree.add(10)  # 10 is root
            # left
            tree.add(5)
            tree.add(7)
            tree.add(3)

            # right
            tree.add(15)
            tree.add(13)
            tree.add(17)

            print(tree.getMinValue())
            print(tree.getMaxValue())
            print(tree.isEmpty())

            tree.print()

            print("\n---Copying tree---")
            copy_tree = BinaryTree()
            tree.preOrder(lambda el: copy_tree.add(el))
            copy_tree.print()

            print("\n---To only right nodes--")
            only_right_node = BinaryTree()
            tree.inOrder(lambda x: only_right_node.add(x))
            only_right_node.print()

            print("\n---Rebalance to right tree--")
            rebalance_to_right_tree = BinaryTree()
            tree.postOrder(lambda x: rebalance_to_right_tree.add(x))
            rebalance_to_right_tree.print()

            sys.stdout = original_stdout

        print("Tree result: ", end="")
        filecmp.clear_cache()
        if filecmp.cmp(tree_result_file_name,
                       "test_answer_key/tree_answer_key.txt"):
            print(bcolors.GREEN + bcolors.BOLD + "PASSED" + bcolors.ENDC)
            if not keepResultFile:
                os.remove(tree_result_file_name)
        else:
            print(bcolors.RED + bcolors.BOLD + "FAILED" + bcolors.ENDC)
Example #17
 def test_backupdatabase(self):
     """ Tests that backupdatabase properly backs up a database """
     with open(self.testfile, 'w') as f:
         f.write("hello world")
     myhash = hashfile(self.testfile)
     backup = util.backupdatabase(self.testfile.name)
     self.assertTrue(isinstance(backup, pathlib.Path))
     self.assertEqual(hashfile(backup), myhash)
     ## backupdatabase does these tests itself
     self.assertTrue(backup.exists())
     filecmp.clear_cache()
     self.assertTrue(
         filecmp.cmp(str(self.testfile), str(backup), shallow=False))
Example #19
    def test_save(self):
        """save locally."""
        with self.temp_directory() as directory:
            dir2 = directory.clone()
            self.assertEqual(len(dir2), len(directory))
            try:
                gpxfile = self.create_test_track()
                directory.add(gpxfile)
                self.assertEqual(len(directory), 1)
                aclone = gpxfile.clone()
                self.assertEqualTracks(gpxfile, aclone)
                dir2.scan()
                self.assertEqual(len(dir2), 1)

                gpxfile2 = gpxfile.clone()
                self.assertEqualTracks(gpxfile, gpxfile2)
                directory.add(gpxfile2)
                self.assertEqual(len(directory), 2)
                dir2.add(gpxfile2)
                self.assertEqual(len(dir2), 2)

                track2_copy = dir2.add(gpxfile2.clone())
                self.assertEqualTracks(gpxfile, track2_copy)
                self.assertEqualTracks(gpxfile2, track2_copy)
                self.assertIs(gpxfile.backend, directory)
                self.assertIs(gpxfile2.backend, directory)
                self.assertIs(track2_copy.backend, dir2)
                self.assertEqual(len(directory), 2)
                self.assertEqual(len(dir2), 3)
                directory.scan()  # we changed it through dir2
                self.assertEqual(len(directory), 3)
                dir2.scan()
                self.assertEqual(len(directory), 3)
                title = 'whatevertitle'
                for _ in dir2:
                    _.id_in_backend = title
                trunk = os.path.join(directory.url, title)
                expected_names = [trunk + x + '.gpx' for x in ('.1', '.2', '')]
                files = sorted(
                    os.path.join(directory.url, x)
                    for x in os.listdir(directory.url) if x.endswith('.gpx'))
                self.assertEqual(files, expected_names)
                self.assertEqual(len(dir2), 3)
                directory.scan()
                dir2.merge(directory, remove=True)
                self.assertEqual(len(dir2), 1)
                filecmp.clear_cache()
            finally:
                dir2.detach()
Example #20
def test_bam_coverage_offset_minus1():
    """
    Test -bs 1 --Offset -1
    """
    outfile = '/tmp/test_offset.bw'
    args = "--Offset -1 -b {} -p 1 -bs 1 -o {}".format(BAMFILE_A, outfile)
    args = args.split()
    bam_cov.main(args)
    try:
        # filecmp.clear_cache() exists on Python 3.4+ only
        filecmp.clear_cache()
    except AttributeError:
        pass
    assert (filecmp.cmp(outfile, "{}testA_offset-1.bw".format(ROOT)) is True)
    unlink(outfile)
Example #21
def test_bam_coverage_offset20_minus4():
    """
    Test -bs 1 --Offset 20 -4
    """
    outfile = '/tmp/test_offset.bw'
    args = "--Offset 20 -4 -b {} -p 1 -bs 1 -o {}".format(BAMFILE_A, outfile)
    args = args.split()
    bam_cov.main(args)
    try:
        # filecmp.clear_cache() exists on Python 3.4+ only
        filecmp.clear_cache()
    except AttributeError:
        pass
    assert(filecmp.cmp(outfile, "{}testA_offset20_-4.bw".format(ROOT)) is True)
    unlink(outfile)
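The try/except guard in the two tests above exists because filecmp.clear_cache() was only added in Python 3.4. A compact, equivalent sketch (not code from the source):

import filecmp

# call clear_cache() when it exists; fall back to a no-op on older Pythons
getattr(filecmp, "clear_cache", lambda: None)()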
Example #22
def backupgeneralfile(file):
    """ Backs up any given file in its parent directory and returns the backup's pathlib object (based on backupdatabase). """
    parent = file.parent
    timecode = time.time()
    backup = parent / f"backup_{timecode}_{file.name}"
    while backup.exists():
        timecode = time.time()
        backup = file.parent / f"backup_{timecode}_{file.name}"
    shutil.copy2(str(file), str(backup))
    if not backup.exists():
        raise FileNotFoundError("Failed to backup file")
    filecmp.clear_cache()
    if not filecmp.cmp(str(file), str(backup), shallow=False):
        raise OSError("File backup not identical to original")
    return backup
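A round-trip sketch combining this helper with restorefile from Example #15; the file name is hypothetical and the argument must be a pathlib.Path:

from pathlib import Path

target = Path("settings.ini")         # hypothetical file to protect
backup = backupgeneralfile(target)    # copy is verified via filecmp.cmp
target.write_text("bad edit")         # simulate a corrupting change
restorefile(target, backup)           # verified restore; unlinks the backup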
Example #23
def test_search():
    print('## SEARCH ##')

    #Stats
    totalTest, passedTest, memoryGoodTest = 0, 0, 0

    refTimeSum, mineTimeSum, diffTimeSum, percentTimeSum = 0, 0, 0, 0

    files = [TESTS_DIR + f for f in os.listdir(TESTS_DIR) if os.path.isfile(os.path.join(TESTS_DIR, f))]
    for filename in files:
        if filename[-len(TESTS_EXT):] != TESTS_EXT:
            continue
        totalTest += 1
        print('# File: ', filename)

        timeMine, memUsed = search(filename, APP_BIN_PATH, DICT_BIN, LOG_FILE)
        if memUsed < 512:
            memoryGoodTest += 1

        timeRef, _ = search(filename, REF_APP_BIN_PATH, REF_DICT_BIN, REF_LOG_FILE)

        filecmp.clear_cache()
        if not filecmp.cmp(LOG_FILE, REF_LOG_FILE):
            print('!! Diff with ref:')
            print('!!!!!!!!!!!!!!!!!!!!!!!Our!!!!!!!!!!!!!!!!!!!!!!!')
            with open(LOG_FILE, 'r') as f:
                print(f.read())
            print('!!!!!!!!!!!!!!!!!!!!!!!Ref!!!!!!!!!!!!!!!!!!!!!!!')
            with open(REF_LOG_FILE, 'r') as f:
                print(f.read())

        else:
            passedTest += 1
        refTimeSum += timeRef
        mineTimeSum += timeMine
        diffTimeSum += round(timeMine - timeRef, 3)
        percentTimeSum += round(timeMine / timeRef, 2)
        print('>> Time analysis: Ref', timeRef, 's | Ours', timeMine, 's | Difference',
              round(timeMine - timeRef, 3), 's | Ratio perf', round(timeMine / timeRef, 2))

    print('\n#########\n# RECAP #\n#########')
    print('>> Passed test:', passedTest, '/', totalTest, '(', 100 * passedTest // totalTest, '%)')
    print('>> Memory passed test:', memoryGoodTest, '/', totalTest, '(', 100 * memoryGoodTest // totalTest, '%)')
    print('>> Total Time analysis: Ref', round(refTimeSum, 3), 's | Ours',
          round(mineTimeSum, 3),
          's | Difference', round(diffTimeSum, 3), 's')
    print('>> Average Time analysis: Ref', round(refTimeSum / totalTest, 3), 's | Ours', round(mineTimeSum / totalTest, 3),
          's | Difference', round(diffTimeSum / totalTest, 3), 's | Ratio perf ', round(percentTimeSum / totalTest, 2))
Example #24
def test_bam_coverage_offset_minus1():
    """
    Test -bs 1 --Offset -1
    """
    outfile = '/tmp/test_offset.bw'
    for fname in [BAMFILE_A, CRAMFILE_A]:
        args = "--Offset -1 -b {} -p 1 -bs 1 -o {}".format(fname, outfile)
        args = args.split()
        bam_cov.main(args)
        try:
            # filecmp.clear_cache() exists on Python 3.4+ only
            filecmp.clear_cache()
        except AttributeError:
            pass
        assert(filecmp.cmp(outfile, "{}testA_offset-1.bw".format(ROOT)) is True)
        unlink(outfile)
Example #25
    def test_save(self):
        """save locally"""
        with Directory(cleanup=True) as directory:
            dir2 = self.clone_backend(directory)
            try:
                activity = self.create_test_activity()
                activity.backend = directory
                self.assertEqual(len(directory), 1)
                aclone = activity.clone()
                self.assertEqualActivities(activity, aclone)

                self.assertEqual(len(dir2), 1)

                activity2 = activity.clone()
                self.assertEqualActivities(activity, activity2)
                activity2.backend = directory
                self.assertEqual(len(directory), 2)
                with self.assertRaises(Exception):
                    activity2.backend = dir2
                with self.assertRaises(Exception):
                    activity2.backend = None
                activity3 = dir2.save(activity2)
                self.assertEqualActivities(activity, activity3)
                self.assertEqualActivities(activity2, activity3)
                self.assertIs(activity.backend, directory)
                self.assertIs(activity2.backend, directory)
                self.assertIs(activity3.backend, dir2)
                self.assertEqual(len(directory), 2)
                self.assertEqual(len(dir2), 2)
                directory.scan()  # we changed it through dir2
                self.assertEqual(len(directory), 3)
                trunk = os.path.join(directory.url, 'Random GPX # 0')
                expected_names = list(trunk + x
                                      for x in ('.gpx', '.1.gpx', '.2.gpx'))
                files = list(
                    os.path.join(directory.url, x)
                    for x in os.listdir(directory.url) if x.endswith('.gpx'))
                self.assertEqual(files, expected_names)
                filecmp.clear_cache()
                for idx1, idx2 in ((0, 1), (0, 2)):
                    file1 = files[idx1]
                    file2 = files[idx2]
                    self.assertTrue(
                        filecmp.cmp(file1, file2),
                        'Files are different: {} and {}'.format(file1, file2))
            finally:
                dir2.destroy()
Example #26
def test_bam_coverage_offset20_minus4():
    """
    Test -bs 1 --Offset 20 -4
    """
    outfile = '/tmp/test_offset.bw'
    for fname in [BAMFILE_A, CRAMFILE_A]:
        args = "--Offset 20 -4 -b {} -p 1 -bs 1 -o {}".format(fname, outfile)
        args = args.split()
        bam_cov.main(args)
        try:
            # filecmp.clear_cache() exists on Python 3.4+ only
            filecmp.clear_cache()
        except AttributeError:
            pass
        assert (filecmp.cmp(outfile, "{}testA_offset20_-4.bw".format(ROOT)) is
                True)
        unlink(outfile)
Example #27
    def test_update_feature(self):
        event_connection = AnnotationEventDB(None)
        stable_id_service = OSIDService(None)
        event_collection = EventCollection('test', event_connection,
                                           stable_id_service)
        event_collection.event_types = {'merge_gene'}
        event_collection.create()

        ga = GFFAnnotations('./test_update_feature.gff',
                            './test_update_feature.out_gff', event_collection)
        ga.annotate_gff()
        filecmp.clear_cache()
        self.assertEqual(
            True,
            filecmp.cmp('./expected_update_feature.gff',
                        './test_update_feature.out_gff',
                        shallow=False))
Example #28
async def test_get_file_from_previous_node_with_mapping_of_same_key_name(
    special_2nodes_configuration: Callable,
    user_id: int,
    project_id: str,
    node_uuid: str,
    filemanager_cfg: None,
    node_link: Callable,
    store_link: Callable,
    postgres_db: sa.engine.Engine,
    item_type: str,
    item_value: str,
    item_alias: str,
    item_pytype: Type,
):
    config_dict, _, this_node_uuid = special_2nodes_configuration(
        prev_node_inputs=None,
        prev_node_outputs=[("in_15", item_type, await store_link(item_value))],
        inputs=[("in_15", item_type, node_link("in_15"))],
        outputs=None,
        project_id=project_id,
        previous_node_id=f"{uuid4()}",
        node_id=node_uuid,
    )
    PORTS = await node_ports_v2.ports(user_id=user_id,
                                      project_id=project_id,
                                      node_uuid=node_uuid)
    await check_config_valid(PORTS, config_dict)
    # add a filetokeymap
    config_dict["schema"]["inputs"]["in_15"]["fileToKeyMap"] = {
        item_alias: "in_15"
    }
    np_helpers.update_configuration(postgres_db, project_id, this_node_uuid,
                                    config_dict)  # pylint: disable=E1101
    await check_config_valid(PORTS, config_dict)
    file_path = await (await PORTS.inputs)["in_15"].get()
    assert isinstance(file_path, item_pytype)
    assert file_path == Path(
        tempfile.gettempdir(),
        "simcorefiles",
        f"{threading.get_ident()}",
        "in_15",
        item_alias,
    )
    assert file_path.exists()
    filecmp.clear_cache()
    assert filecmp.cmp(file_path, item_value)
Example #29
def backupdatabase(filename):  ## Tested- GeneralDatabase
    """ Creates a duplicate of the file named "backup_{timecode}_{filename}" and checks that the backup is identical. Returns the backup's filepath as a pathlib.Path instance. """
    dbfile = (filestructure.DATABASEPATH / filename).resolve()
    if not dbfile.exists():
        raise ValueError("File does not exist in the database directory.")
    timecode = time.time()
    backup = dbfile.parent / f"backup_{timecode}_{filename}"
    while backup.exists():
        timecode = time.time()
        backup = dbfile.parent / f"backup_{timecode}_{dbfile.name}"
    shutil.copy2(str(dbfile), str(backup))
    if not backup.exists():
        raise FileNotFoundError("Failed to backup database")
    filecmp.clear_cache()
    if not filecmp.cmp(str(dbfile), str(backup), shallow=False):
        raise OSError("Database backup not identical to original")
    return backup
Example #30
def get_diff():
    filecmp.clear_cache()
    if not filecmp.cmp("dumps/zaz.txt", "dumps/nous.txt", shallow=False):
        with open("dumps/zaz.txt") as file1, open("dumps/nous.txt") as file2:
            text1 = file1.readlines()
            text2 = file2.readlines()
        for line in difflib.unified_diff(text1, text2):
            print(line)
        return 1
    return 0
Example #31
def copyModuleCode(repoPaths, projectPaths):
    repo_map = {
        "aberrant-expression-pipeline": "AberrantExpression",
        "aberrant-splicing-pipeline": "AberrantSplicing",
        "mae-pipeline": "MonoallelicExpression",
        "rvc-pipeline": "rnaVariantCalling"
    }

    for repo, analysis_dir in repo_map.items():
        fc.clear_cache()  # clear file compare cache to avoid mistakes
        base_repo = repoPaths["modules"] / repo
        local_proj = projectPaths["Scripts"] / analysis_dir / "pipeline"
        if not local_proj.is_dir():  # module directory does not exist. copy it
            logger.info(
                f"{local_proj} is not a directory, copy over from drop base")
            copy_tree(str(base_repo), str(local_proj))
        else:  #module dir does exist. Do a safe-overwrite
            overwrite(base_repo, local_proj)
Example #32
def overwrite(base_repo, local_proj):
    fc.clear_cache()  # clear file compare cache to avoid mistakes
    compare_obj = fc.dircmp(base_repo, local_proj)

    #remove all things not in the base_repo
    for i in compare_obj.right_only:
        logger.info(f"removing local file {i} it is not in the base drop")
        if os.path.isfile(local_proj / i):
            removeFile(local_proj / i, warn=False)
        else:
            remove_tree(local_proj / i)

    # for all dirs and files in base_dir
    for i in compare_obj.left_list:
        #files
        if os.path.isfile(base_repo / i):
            # filename is the same in both
            if i in compare_obj.common_files:

                # if file is diff copy original over. otherwise do nothing
                if i in compare_obj.diff_files:
                    logger.info(
                        f"overwriting {local_proj / i} with {base_repo / i})")
                    copy2(base_repo / i, local_proj / i)

            # file not present in local project. Copy it
            else:
                logger.info(
                    f"overwriting {local_proj / i} with {base_repo / i})")
                copy2(base_repo / i, local_proj / i)

        #dirs
        elif os.path.isdir(base_repo / i):
            if i in compare_obj.common_dirs:
                overwrite(base_repo / i, local_proj / i)
            else:
                logger.info(
                    f"the directory {str(base_repo / i)} does not exist locally. copying here: {str(local_proj)}"
                )
                copy_tree(str(base_repo / i), str(local_proj / i))

        # other?
        else:
            logger.info(f"{i} is something other than a file or dir. Ignoring")
Example #33
def process_test_file(csv_cuck_file):
    assertCsv(csv_cuck_file)

    cuck_input_path = getPathFromFileName(csv_cuck_file)
    cuck_result_path = get_result_file_path(csv_cuck_file)
    cuck_expect_path = get_expect_file_path(csv_cuck_file)

    filter = read_filter(cuck_input_path)

    assert filter is not None, "filter couldn't be loaded"

    write_filter(filter, cuck_result_path)

    # check file equality
    assert filecmp.cmp(cuck_expect_path, cuck_result_path, shallow=False), "Generated test file does not match the precomputed file"

    # delete generated file
    os.remove(cuck_result_path)
    filecmp.clear_cache()
Example #34
def file_compare(path1, path2):
    log.debug("compare %r vs. %r", path1, path2)
    assert_is_file(path1)
    assert_is_file(path2)

    # check whether both paths refer to the same file
    if path1.samefile(path2):
        log.debug("Paths refer to the same file")
        return True

    # compare file size:
    size1 = path1.stat().st_size
    size2 = path2.stat().st_size
    if size1 != size2:
        log.debug("Not the same file: different file size.")
        return False

    # Compare file content:
    filecmp.clear_cache()  # without clearing the cache, unit tests may fail!
    return filecmp.cmp(str(path1), str(path2), shallow=False)  # str() needed for python 3.5
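The size comparison above is a cheap short-circuit: filecmp.cmp() with shallow=False reads both files in full, so files of unequal size never reach the byte-by-byte comparison. A usage sketch with hypothetical paths (pathlib.Path arguments are required, since .samefile() and .stat() are called on them):

from pathlib import Path

same = file_compare(Path("report_v1.pdf"), Path("report_v2.pdf"))
print("identical" if same else "different")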
Example #35
def download_mirrors(config):
    """Retrieve mirrors from manjaro.org
    :param config:
    :returns: tuple with bool for mirrors.json and status.json
    :rtype: tuple
    """
    fetchmirrors = False
    fetchstatus = False
    # mirrors.json
    try:
        with urlopen(config["url_mirrors_json"]) as response:
            mirrorlist = json.loads(response.read().decode("utf8"),
                                    object_pairs_hook=collections.OrderedDict)
        fetchmirrors = True
        tempfile = config["work_dir"] + "/temp.file"
        jsonFn.json_dump_file(mirrorlist, tempfile)
        filecmp.clear_cache()
        if fileFn.check_existance_of(conf.USR_DIR, folder=True):
            if not fileFn.check_existance_of(config["mirror_file"]):
                jsonFn.json_dump_file(mirrorlist, config["mirror_file"])
            elif not filecmp.cmp(tempfile, config["mirror_file"]):
                jsonFn.json_dump_file(mirrorlist, config["mirror_file"])
        os.remove(tempfile)
    except (HTTPException, json.JSONDecodeError, URLError):
        pass
    # status.json
    try:
        with urlopen(config["url_status_json"]) as response:
            statuslist = json.loads(
                response.read().decode("utf8"),
                object_pairs_hook=collections.OrderedDict)
        fetchstatus = True
        jsonFn.write_json_file(statuslist, config["status_file"])
    except (HTTPException, json.JSONDecodeError, URLError):
        pass
    # result
    return fetchmirrors, fetchstatus
Example #36
            shutil.copyfile(tmp_name, self.path + self.script)


        # cleaning
        tmp.close()
        os.remove(tmp_name)


# I don't like it here.
pool = workerpool.WorkerPool(size=10)


# Get script name and URL from config and put a job in the pool
def update(section, path):
    for option in config.options(section):
        work = Updater(option, config.get(section, option, raw=True), path)
        pool.put(work)


# Call all the functions!
config, scripts_path, common_path = check_config()
update('scripts', scripts_path)
update('common', common_path)

# Cleanup and exit
clear_cache()
pool.shutdown()
pool.wait()
print("All done!")
input('Press enter to Exit')
Example #37
def diff_replica(filters, script_path):

    utils = Utils()

    replica_list = utils.read_file_list(os.path.join(script_path, filters.FILE_REPLICAS))

    available_replica_list = [replica for replica in replica_list if os.path.isdir(replica) and not replica == script_path]

    for x in available_replica_list:
        print("{}: {}".format(available_replica_list.index(x), x))
        
    val = ""
    while not val.isdigit():
        val = prompt("Select replica index to check differences or -1 to cancel: ")
        if val == "-1":
            return

    dest_path = available_replica_list[int(val)]

    # for x in os.listdir(dest_path):
    #     print(os.path.join(dest_path, x))
    #
    # print()
    #
    # for x in os.listdir(script_path):
    #     print(os.path.join(script_path, x))

    current_elems = set(os.listdir(script_path))
    replica_elems = set(os.listdir(dest_path))

    new_in_current = current_elems - replica_elems
    new_in_replica = replica_elems - current_elems

    if len(new_in_current) > 0:
        print("New in current [{}]:\n".format(script_path))
        for x in new_in_current:
            print("    {}".format(x))

    if len(new_in_replica) > 0:
        print("\nNew in replica [{}]:\n".format(dest_path))
        for x in new_in_replica:
            print("    {}".format(x))

    if sys.version_info.major > 2:
        filecmp.clear_cache()
    same_elements = current_elems & replica_elems
    for x in same_elements:
        if not filters.in_ignore_list(script_path, x):
            current_file_path = os.path.join(script_path, x)
            replica_file_path = os.path.join(dest_path, x)
            if not filecmp.cmp(current_file_path, replica_file_path):
                current_mt_timestamp = os.stat(current_file_path)[stat.ST_MTIME]
                replica_mt_timestamp = os.stat(replica_file_path)[stat.ST_MTIME]
                current_mt = datetime.datetime.fromtimestamp(current_mt_timestamp)
                replica_mt = datetime.datetime.fromtimestamp(replica_mt_timestamp)
                print("difference: {} current: {} replica: {}".format(x, current_mt, replica_mt))
                if current_mt_timestamp >= replica_mt_timestamp:
                    print("    current seems more recent")
                else:
                    print("    replica seems more recent")
        else:
            print("file {} is in ignore list, ignored.".format(x))
Example #38
def not_collision(fn1, fn2):
    import filecmp
    filecmp.clear_cache()
    return filecmp.cmp(fn1, fn2, shallow=False)
Example #40
def have_same_content(path1, path2):
	filecmp.clear_cache()
	return filecmp.cmp(path1, path2)
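All of the snippets above call clear_cache() for the same reason: filecmp.cmp() memoizes its results, keyed on the two paths plus a stat signature of (file type, size, mtime) for each. If a file is rewritten so quickly, or with its timestamps restored, that the signature does not change, cmp() can return a stale cached verdict. A minimal sketch of that failure mode, with illustrative file names:

import filecmp
import os

first = filecmp.cmp("a.bin", "b.bin", shallow=False)   # result is cached

# Rewrite b.bin in place with the same size, then restore its timestamps
# so the stat signature (type, size, mtime) stays identical.
st = os.stat("b.bin")
with open("b.bin", "r+b") as f:
    f.write(b"X")
os.utime("b.bin", ns=(st.st_atime_ns, st.st_mtime_ns))

stale = filecmp.cmp("a.bin", "b.bin", shallow=False)   # served from cache
filecmp.clear_cache()
fresh = filecmp.cmp("a.bin", "b.bin", shallow=False)   # re-reads both files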