Internal refactor: no default cache dirname
The two cache dirs are currently "refs" and "downloads". Make these
always explicit, rather than `"downloads"` being an internal default
baked into the downloader.
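
In practice the change at each call site is just that the cache directory name is now passed explicitly; condensed roughly from the hunks below (the `disable_cache=False` value here is only illustrative):

    from check_jsonschema.cachedownloader import CacheDownloader

    # before: the dirname implicitly defaulted to "downloads" inside the downloader
    CacheDownloader(disable_cache=False)

    # after: every caller names its cache directory explicitly
    CacheDownloader("downloads", disable_cache=False)  # schema reader
    CacheDownloader("refs", disable_cache=False)       # $ref resolver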
sirosen committed Nov 25, 2024
1 parent 79efd48 commit 73fd93f
Showing 4 changed files with 14 additions and 18 deletions.
11 changes: 3 additions & 8 deletions src/check_jsonschema/cachedownloader.py
@@ -33,7 +33,7 @@ def _base_cache_dir() -> str | None:
     return cache_dir
 
 
-def _resolve_cache_dir(dirname: str = "downloads") -> str | None:
+def _resolve_cache_dir(dirname: str) -> str | None:
     cache_dir = _base_cache_dir()
     if cache_dir:
         cache_dir = os.path.join(cache_dir, "check_jsonschema", dirname)
@@ -100,13 +100,8 @@ class FailedDownloadError(Exception):
 
 
 class CacheDownloader:
-    def __init__(
-        self, cache_dir: str | None = None, disable_cache: bool = False
-    ) -> None:
-        if cache_dir is None:
-            self._cache_dir = _resolve_cache_dir()
-        else:
-            self._cache_dir = _resolve_cache_dir(cache_dir)
+    def __init__(self, cache_dir: str, *, disable_cache: bool = False) -> None:
+        self._cache_dir = _resolve_cache_dir(cache_dir)
         self._disable_cache = disable_cache
 
     def _download(
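For reference, the resolved location is simply the explicit dirname nested under the per-user cache root; a minimal sketch of the join performed in `_resolve_cache_dir`, assuming a hypothetical Linux-style base directory (the real value comes from `_base_cache_dir()`, whose platform-dependent body is not shown in this diff):

    import os

    # hypothetical base dir; _base_cache_dir() may also return None, in which case no cache dir is used
    os.path.join("/home/user/.cache", "check_jsonschema", "downloads")
    # -> "/home/user/.cache/check_jsonschema/downloads"
    os.path.join("/home/user/.cache", "check_jsonschema", "refs")
    # -> "/home/user/.cache/check_jsonschema/refs"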
2 changes: 1 addition & 1 deletion src/check_jsonschema/schema_loader/readers.py
@@ -79,7 +79,7 @@ def __init__(
         self.url = url
         self.parsers = ParserSet()
         self.downloader = CacheDownloader(
-            disable_cache=disable_cache,
+            "downloads", disable_cache=disable_cache
         ).bind(url, cache_filename, validation_callback=self._parse)
         self._parsed_schema: dict | _UnsetType = _UNSET
 
2 changes: 1 addition & 1 deletion src/check_jsonschema/schema_loader/resolver.py
@@ -66,7 +66,7 @@ def create_retrieve_callable(
         base_uri = retrieval_uri
 
     cache = ResourceCache()
-    downloader = CacheDownloader("refs", disable_cache)
+    downloader = CacheDownloader("refs", disable_cache=disable_cache)
 
     def get_local_file(uri: str) -> t.Any:
         path = filename2path(uri)
17 changes: 9 additions & 8 deletions tests/unit/test_cachedownloader.py
@@ -30,7 +30,7 @@ def default_response():
 
 
 def test_default_filename_from_uri(default_response):
-    cd = CacheDownloader().bind("https://example.com/schema1.json")
+    cd = CacheDownloader("downloads").bind("https://example.com/schema1.json")
     assert cd._filename == "schema1.json"
 
 
@@ -76,7 +76,7 @@ def fake_expanduser(path):
     monkeypatch.setattr(platform, "system", fakesystem)
     monkeypatch.setattr(os.path, "expanduser", fake_expanduser)
 
-    cd = CacheDownloader()
+    cd = CacheDownloader("downloads")
     assert cd._cache_dir == expect_value
 
     if sysname == "Darwin":
@@ -114,7 +114,7 @@ def test_cachedownloader_cached_file(tmp_path, monkeypatch, default_response):
     f.write_text("{}")
 
     # set the cache_dir to the tmp dir (so that cache_dir will always be set)
-    cd = CacheDownloader(cache_dir=tmp_path).bind(str(f))
+    cd = CacheDownloader(tmp_path).bind(str(f))
     # patch the downloader to skip any download "work"
     monkeypatch.setattr(
         cd._downloader, "_download", lambda file_uri, filename, response_ok: str(f)
@@ -128,7 +128,7 @@ def test_cachedownloader_cached_file(tmp_path, monkeypatch, default_response):
 def test_cachedownloader_on_success(get_download_cache_loc, disable_cache):
     add_default_response()
     f = get_download_cache_loc("schema1.json")
-    cd = CacheDownloader(disable_cache=disable_cache).bind(
+    cd = CacheDownloader("downloads", disable_cache=disable_cache).bind(
         "https://example.com/schema1.json"
     )
 
@@ -171,7 +171,7 @@ def test_cachedownloader_succeeds_after_few_errors(
     )
     add_default_response()
     f = get_download_cache_loc("schema1.json")
-    cd = CacheDownloader(disable_cache=disable_cache).bind(
+    cd = CacheDownloader("downloads", disable_cache=disable_cache).bind(
         "https://example.com/schema1.json"
     )
 
@@ -205,7 +205,7 @@ def test_cachedownloader_fails_after_many_errors(
     )
     add_default_response()  # never reached, the 11th response
     f = get_download_cache_loc("schema1.json")
-    cd = CacheDownloader(disable_cache=disable_cache).bind(
+    cd = CacheDownloader("downloads", disable_cache=disable_cache).bind(
         "https://example.com/schema1.json"
     )
     with pytest.raises(FailedDownloadError):
@@ -226,6 +226,7 @@ def test_cachedownloader_retries_on_bad_data(get_download_cache_loc, disable_cac
     add_default_response()
     f = get_download_cache_loc("schema1.json")
     cd = CacheDownloader(
+        "downloads",
         disable_cache=disable_cache,
     ).bind(
         "https://example.com/schema1.json",
@@ -281,7 +282,7 @@ def fake_mktime(*args):
     if file_exists:
         inject_cached_download(uri, original_file_contents)
 
-    cd = CacheDownloader().bind(uri)
+    cd = CacheDownloader("downloads").bind(uri)
 
     # if the file already existed, it will not be overwritten by the cachedownloader
     # so the returned value for both the downloader and a direct file read should be the
@@ -326,7 +327,7 @@ def dummy_validate_bytes(data):
 
     # construct a downloader pointed at the schema and file, expecting a cache hit
     # and use the above validation method
-    cd = CacheDownloader().bind(
+    cd = CacheDownloader("downloads").bind(
         "https://example.com/schema1.json",
         validation_callback=dummy_validate_bytes,
     )
