def unknown_revision():
    """
    Hypothesis strategy returning a random revision not ingested
    into the test archive.
    """

    def _not_ingested(revision_hex):
        storage = get_tests_data()["storage"]
        return storage.revision_get([hash_to_bytes(revision_hex)])[0] is None

    return sha1().filter(_not_ingested)
def unknown_snapshot():
    """
    Hypothesis strategy returning a random snapshot not ingested
    into the test archive.
    """
    # fixed docstring: it previously said "revision" (copy-paste from
    # unknown_revision) although a snapshot id is generated here
    return sha1().filter(
        lambda s: get_tests_data()["storage"].snapshot_get_branches(
            hash_to_bytes(s)
        )
        is None
    )
def unknown_content():
    """
    Hypothesis strategy returning a random content not ingested
    into the test archive.
    """

    def _not_ingested(content):
        storage = get_tests_data()["storage"]
        return storage.content_get_data(hash_to_bytes(content["sha1"])) is None

    return new_content().filter(_not_ingested)
def new_origin():
    """
    Hypothesis strategy returning a random origin not ingested
    into the test archive.
    """

    def _not_ingested(origin):
        storage = get_tests_data()["storage"]
        return storage.origin_get([origin.url])[0] is None

    return new_origin_strategy().filter(_not_ingested)
def unknown_directory():
    """
    Hypothesis strategy returning a random directory not ingested
    into the test archive.
    """

    def _missing(directory_hex):
        storage = get_tests_data()["storage"]
        missing = storage.directory_missing([hash_to_bytes(directory_hex)])
        return len(list(missing)) > 0

    return sha1().filter(_missing)
def directory_with_subdirs():
    """
    Hypothesis strategy returning a random directory containing
    sub directories ingested into the test archive.
    """
    # use a generator expression so any() short-circuits on the first
    # sub directory found, instead of materializing the whole listing
    # into a list first
    return directory().filter(
        lambda d: any(
            entry["type"] == "dir"
            for entry in get_tests_data()["storage"].directory_ls(
                hash_to_bytes(d)
            )
        )
    )
def _get_origin_dfs_revisions_walker():
    """Return a DFS revisions walker rooted at the HEAD revision of the
    latest snapshot of a random test origin (last origin excluded)."""
    tests_data = get_tests_data()
    storage = tests_data["storage"]
    origin = random.choice(tests_data["origins"][:-1])
    snapshot = snapshot_get_latest(storage, origin["url"])
    head_branch = snapshot.branches[b"HEAD"]
    if head_branch.target_type.value == "alias":
        # resolve the alias to get the branch it actually points to
        head = snapshot.branches[head_branch.target].target
    else:
        head = head_branch.target
    return get_revisions_walker("dfs", storage, head)
def origin_with_multiple_visits():
    """
    Hypothesis strategy returning a random origin with multiple visits
    ingested into the test archive.
    """
    # docstring previously omitted the "multiple visits" condition;
    # the manual append loop is replaced by a comprehension
    tests_data = get_tests_data()
    storage = tests_data["storage"]
    origins = [
        origin
        for origin in tests_data["origins"]
        if len(storage.origin_visit_get(origin["url"]).results) > 1
    ]
    return sampled_from(origins)
def origin_with_pull_request_branches():
    """
    Hypothesis strategy returning a random origin with pull request
    branches ingested into the test archive.
    """
    ret = []
    tests_data = get_tests_data()
    storage = tests_data["storage"]
    origins = storage.origin_list(limit=1000)
    for origin in origins.results:
        snapshot = snapshot_get_latest(storage, origin.url)
        # generator expression lets any() short-circuit on the first
        # pull-request branch instead of building an intermediate list
        if any(b"refs/pull/" in branch for branch in snapshot.branches):
            ret.append(origin)
    return sampled_from(ret)
def origin_with_releases():
    """
    Hypothesis strategy returning a random origin with releases
    ingested into the test archive.
    """
    # docstring previously omitted the "with releases" condition;
    # any() now takes a generator so it short-circuits on the first
    # release branch instead of building an intermediate list
    ret = []
    tests_data = get_tests_data()
    for origin in tests_data["origins"]:
        snapshot = snapshot_get_latest(tests_data["storage"], origin["url"])
        if any(
            branch.target_type.value == "release"
            for branch in snapshot.branches.values()
        ):
            ret.append(origin)
    return sampled_from(ret)
def setUpClass(cls):
    """Expose the in-memory test storages/indexers as class attributes
    and point the swh-web configuration and service layer at them."""
    super().setUpClass()
    tests_data = get_tests_data()
    for key in ('storage', 'idx_storage', 'mimetype_indexer',
                'language_indexer', 'license_indexer', 'ctags_indexer'):
        setattr(cls, key, tests_data[key])
    # Update swh-web configuration to use the in-memory storage and
    # indexer storage instantiated in the tests.data module
    swh_config = config.get_config()
    swh_config.update({'storage': cls.storage,
                       'indexer_storage': cls.idx_storage})
    service.storage = cls.storage
    service.idx_storage = cls.idx_storage
def _init_content_tests_data(data_path, data_dict, ext_key):
    """
    Helper function to read the content of a directory, store it
    into a test archive and add some files metadata (sha1 and/or
    expected programming language) in a dict.

    Args:
        data_path (str): path to a directory relative to the tests
            folder of swh-web
        data_dict (dict): the dict that will store files metadata
        ext_key (bool): whether to use file extensions or filenames
            as dict keys
    """
    test_contents_dir = os.path.join(
        os.path.dirname(__file__), data_path
    ).encode("utf-8")
    directory = from_disk.Directory.from_disk(path=test_contents_dir)

    contents = []
    for name, obj_ in directory.items():
        obj = obj_.to_model()
        if obj.object_type not in (
            Content.object_type,
            DiskBackedContent.object_type,
        ):
            # only plain content objects are registered
            continue
        content_dict = obj.with_data().to_dict()
        content_dict["status"] = "visible"
        sha1 = hash_to_hex(content_dict["sha1"])
        if ext_key:
            # index by file extension; synthesize a filename for the
            # language detection below
            key = name.decode("utf-8").split(".")[-1]
            filename = "test." + key
        else:
            filename = name.decode("utf-8").split("/")[-1]
            key = filename
        language = get_hljs_language_from_filename(filename)
        data_dict[key] = {"sha1": sha1, "language": language}
        contents.append(Content.from_dict(content_dict))

    storage = get_tests_data()["storage"]
    storage.content_add(contents)
def tests_data():
    """Reset the shared test data and make swh-web use the freshly
    instantiated in-memory storages from the tests.data module."""
    data = get_tests_data(reset=True)
    override_storages(
        data["storage"],
        data["idx_storage"],
        data["search"],
    )
    return data
        # NOTE(review): the call this fragment closes (presumably a
        # swh_web_config.update(...) configuring the Keycloak auth
        # server) starts outside the visible chunk — confirm upstream
        "server_url": "http://localhost:8080/auth/",
        "realm_name": "SoftwareHeritage",
    },
})

from .common import *  # noqa
from .common import ALLOWED_HOSTS, LOGGING  # noqa, isort: skip

# tests run against an sqlite database
DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.sqlite3",
        "NAME": swh_web_config["test_db"],
    }
}

# when not running unit tests, make the webapp fetch data from memory storages
if "pytest" not in sys.argv[0] and "PYTEST_XDIST_WORKER" not in os.environ:
    swh_web_config.update({"debug": True, "e2e_tests_mode": True})
    from swh.web.tests.data import get_tests_data, override_storages

    test_data = get_tests_data()
    override_storages(
        test_data["storage"], test_data["idx_storage"], test_data["search"]
    )
else:
    # running under pytest: accept the Django test client host names
    ALLOWED_HOSTS += ["testserver", SWH_WEB_INTERNAL_SERVER_NAME]
    ALLOWED_HOSTS += get_config()["staging_server_names"]

    # Silent DEBUG output when running unit tests
    LOGGING["handlers"]["console"]["level"] = "INFO"  # type: ignore
def _known_swh_object(object_type):
    """Return a Hypothesis strategy sampling among the ingested test
    objects of the given type."""
    known_objects = get_tests_data()[object_type]
    return sampled_from(known_objects)
def _filter_checksum(cs):
    """Reject null checksums and checksums already drawn, recording
    accepted ones so subsequent draws stay unique."""
    generated_checksums = get_tests_data()["generated_checksums"]
    # an all-zero byte string decodes to the integer 0
    is_null = int.from_bytes(cs, byteorder="little") == 0
    if is_null or cs in generated_checksums:
        return False
    generated_checksums.add(cs)
    return True