diff --git a/src/check_jsonschema/cachedownloader.py b/src/check_jsonschema/cachedownloader.py
index 6b9865126..40f1d2cbb 100644
--- a/src/check_jsonschema/cachedownloader.py
+++ b/src/check_jsonschema/cachedownloader.py
@@ -33,7 +33,7 @@ def _base_cache_dir() -> str | None:
     return cache_dir
 
 
-def _resolve_cache_dir(dirname: str = "downloads") -> str | None:
+def _resolve_cache_dir(dirname: str) -> str | None:
     cache_dir = _base_cache_dir()
     if cache_dir:
         cache_dir = os.path.join(cache_dir, "check_jsonschema", dirname)
@@ -100,13 +100,8 @@ class FailedDownloadError(Exception):
 
 
 class CacheDownloader:
-    def __init__(
-        self, cache_dir: str | None = None, disable_cache: bool = False
-    ) -> None:
-        if cache_dir is None:
-            self._cache_dir = _resolve_cache_dir()
-        else:
-            self._cache_dir = _resolve_cache_dir(cache_dir)
+    def __init__(self, cache_dir: str, *, disable_cache: bool = False) -> None:
+        self._cache_dir = _resolve_cache_dir(cache_dir)
         self._disable_cache = disable_cache
 
     def _download(
diff --git a/src/check_jsonschema/schema_loader/readers.py b/src/check_jsonschema/schema_loader/readers.py
index 65280808b..f496d42eb 100644
--- a/src/check_jsonschema/schema_loader/readers.py
+++ b/src/check_jsonschema/schema_loader/readers.py
@@ -79,7 +79,7 @@ def __init__(
         self.url = url
         self.parsers = ParserSet()
         self.downloader = CacheDownloader(
-            disable_cache=disable_cache,
+            "downloads", disable_cache=disable_cache
         ).bind(url, cache_filename, validation_callback=self._parse)
         self._parsed_schema: dict | _UnsetType = _UNSET
 
diff --git a/src/check_jsonschema/schema_loader/resolver.py b/src/check_jsonschema/schema_loader/resolver.py
index 5084328a5..a82ae31c9 100644
--- a/src/check_jsonschema/schema_loader/resolver.py
+++ b/src/check_jsonschema/schema_loader/resolver.py
@@ -66,7 +66,7 @@ def create_retrieve_callable(
     base_uri = retrieval_uri
 
     cache = ResourceCache()
-    downloader = CacheDownloader("refs", disable_cache)
+    downloader = CacheDownloader("refs", disable_cache=disable_cache)
 
     def get_local_file(uri: str) -> t.Any:
         path = filename2path(uri)
diff --git a/tests/unit/test_cachedownloader.py b/tests/unit/test_cachedownloader.py
index 30d47052b..99382e921 100644
--- a/tests/unit/test_cachedownloader.py
+++ b/tests/unit/test_cachedownloader.py
@@ -30,7 +30,7 @@ def default_response():
 
 
 def test_default_filename_from_uri(default_response):
-    cd = CacheDownloader().bind("https://example.com/schema1.json")
+    cd = CacheDownloader("downloads").bind("https://example.com/schema1.json")
     assert cd._filename == "schema1.json"
 
 
@@ -76,7 +76,7 @@ def fake_expanduser(path):
 
     monkeypatch.setattr(platform, "system", fakesystem)
     monkeypatch.setattr(os.path, "expanduser", fake_expanduser)
-    cd = CacheDownloader()
+    cd = CacheDownloader("downloads")
     assert cd._cache_dir == expect_value
 
     if sysname == "Darwin":
@@ -114,7 +114,7 @@ def test_cachedownloader_cached_file(tmp_path, monkeypatch, default_response):
     f.write_text("{}")
     # set the cache_dir to the tmp dir (so that cache_dir will always be set)
-    cd = CacheDownloader(cache_dir=tmp_path).bind(str(f))
+    cd = CacheDownloader(tmp_path).bind(str(f))
     # patch the downloader to skip any download "work"
     monkeypatch.setattr(
         cd._downloader, "_download", lambda file_uri, filename, response_ok: str(f)
     )
@@ -128,7 +128,7 @@ def test_cachedownloader_on_success(get_download_cache_loc, disable_cache):
     add_default_response()
     f = get_download_cache_loc("schema1.json")
-    cd = CacheDownloader(disable_cache=disable_cache).bind(
+    cd = CacheDownloader("downloads", disable_cache=disable_cache).bind(
         "https://example.com/schema1.json"
     )
 
@@ -171,7 +171,7 @@ def test_cachedownloader_succeeds_after_few_errors(
         )
     add_default_response()
     f = get_download_cache_loc("schema1.json")
-    cd = CacheDownloader(disable_cache=disable_cache).bind(
+    cd = CacheDownloader("downloads", disable_cache=disable_cache).bind(
         "https://example.com/schema1.json"
     )
 
@@ -205,7 +205,7 @@ def test_cachedownloader_fails_after_many_errors(
         )
     add_default_response()  # never reached, the 11th response
     f = get_download_cache_loc("schema1.json")
-    cd = CacheDownloader(disable_cache=disable_cache).bind(
+    cd = CacheDownloader("downloads", disable_cache=disable_cache).bind(
         "https://example.com/schema1.json"
    )
    with pytest.raises(FailedDownloadError):
@@ -226,6 +226,7 @@ def test_cachedownloader_retries_on_bad_data(get_download_cache_loc, disable_cac
     add_default_response()
     f = get_download_cache_loc("schema1.json")
     cd = CacheDownloader(
+        "downloads",
         disable_cache=disable_cache,
     ).bind(
         "https://example.com/schema1.json",
@@ -281,7 +282,7 @@ def fake_mktime(*args):
     if file_exists:
         inject_cached_download(uri, original_file_contents)
 
-    cd = CacheDownloader().bind(uri)
+    cd = CacheDownloader("downloads").bind(uri)
 
     # if the file already existed, it will not be overwritten by the cachedownloader
     # so the returned value for both the downloader and a direct file read should be the
@@ -326,7 +327,7 @@ def dummy_validate_bytes(data):
 
     # construct a downloader pointed at the schema and file, expecting a cache hit
     # and use the above validation method
-    cd = CacheDownloader().bind(
+    cd = CacheDownloader("downloads").bind(
         "https://example.com/schema1.json",
         validation_callback=dummy_validate_bytes,
     )
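For reviewers, a minimal usage sketch of the constructor after this change (the class name, the "downloads"/"refs" directory names, and the example URL are all taken from the diff above; this is illustrative, not part of the change itself): the cache subdirectory is now a required positional argument, and disable_cache is keyword-only.

    from check_jsonschema.cachedownloader import CacheDownloader

    # schema downloads are cached under <cache root>/check_jsonschema/downloads
    schema_downloader = CacheDownloader("downloads", disable_cache=False).bind(
        "https://example.com/schema1.json"
    )

    # $ref retrieval uses its own subdirectory, <cache root>/check_jsonschema/refs
    ref_downloader = CacheDownloader("refs", disable_cache=True)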