Merge pull request #684 from bhearsum/glob
fix: properly support globbed upstream artifacts in subdirectories
bhearsum authored Feb 26, 2025
2 parents 4cbdb23 + 7aa33c5 commit b3ed801
Showing 3 changed files with 34 additions and 44 deletions.
src/scriptworker/artifacts.py (2 additions, 1 deletion)
@@ -440,6 +440,7 @@ def get_artifacts_matching_glob(context, task_id, pattern):
     matching = []
     for root, _, files in os.walk(parent_dir):
         for f in files:
-            if fnmatch.fnmatch(f, pattern):
+            relpath = os.path.relpath(os.path.join(root, f), parent_dir)
+            if fnmatch.fnmatch(relpath, pattern):
                 matching.append(os.path.join(root, f))
     return matching
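
The substance of the fix is the changed comparison above: the glob is now matched against the path relative to the artifact root rather than against the bare filename. A minimal sketch of the difference, using illustrative paths that mirror the test fixtures (POSIX path separators assumed; this is not code from the patch):

    import fnmatch
    import os

    # Hypothetical layout modelled on the tests: an upstream task's artifacts
    # live under <work_dir>/cot/dependency1, some in a "public" subdirectory.
    parent_dir = os.path.join("work", "cot", "dependency1")
    full_path = os.path.join(parent_dir, "public", "live.log")
    pattern = "public/*.log"

    # Old behaviour: only the filename was compared, so a pattern that names a
    # subdirectory could never match.
    print(fnmatch.fnmatch(os.path.basename(full_path), pattern))   # False

    # New behaviour: the path relative to the artifact root is compared, so
    # subdirectory globs such as "public/*.log" match as expected.
    relpath = os.path.relpath(full_path, parent_dir)                # "public/live.log"
    print(fnmatch.fnmatch(relpath, pattern))                        # True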
tests/test_artifacts.py (13 additions, 9 deletions)
@@ -281,33 +281,37 @@ def test_get_upstream_artifacts_full_paths_per_task_id(context):
 
 
 @pytest.mark.parametrize(
-    "artifacts_to_create,artifact_filenames,pattern",
+    "artifacts_to_create,artifact_filenames,pattern,extra_dir",
     (
-        (("file_a1", "file_b1", "file_c1"), ("file_a1", "file_b1", "file_c1"), "*"),
-        (("file_a1", "file_b1", "file_c1", "foo.log", "bar.log"), ("foo.log", "bar.log"), "*.log"),
+        (("file_a1", "file_b1", "file_c1"), ("file_a1", "file_b1", "file_c1"), "*", None),
+        (("file_a1", "file_b1", "file_c1", "foo.log", "bar.log"), ("foo.log", "bar.log"), "*.log", None),
+        (("file_a1", "file_b1", "file_c1"), ("file_a1", "file_b1", "file_c1"), "public/*", "public"),
+        (("file_a1", "file_b1", "file_c1", "foo.log", "bar.log"), ("foo.log", "bar.log"), "public/*.log", "public"),
     ),
 )
-def test_get_upstream_artifacts_full_paths_per_task_id_with_globs(context, artifacts_to_create, artifact_filenames, pattern):
+def test_get_upstream_artifacts_full_paths_per_task_id_with_globs(context, artifacts_to_create, artifact_filenames, pattern, extra_dir):
     context.task["payload"] = {
         "upstreamArtifacts": [
             {"paths": [pattern], "taskId": "dependency1", "taskType": "build"},
         ]
     }
 
-    for artifact in artifacts_to_create:
-        folder = os.path.join(context.config["work_dir"], "cot", "dependency1", "public")
+    artifact_dir = os.path.join(context.config["work_dir"], "cot", "dependency1")
+    if extra_dir:
+        artifact_dir = os.path.join(artifact_dir, extra_dir)
 
+    for artifact in artifacts_to_create:
         try:
-            os.makedirs(os.path.join(folder))
+            os.makedirs(os.path.join(artifact_dir))
         except FileExistsError:
             pass
-        touch(os.path.join(folder, artifact))
+        touch(os.path.join(artifact_dir, artifact))
 
     succeeded_artifacts, failed_artifacts = get_upstream_artifacts_full_paths_per_task_id(context)
 
     # ensure deterministic sorting here...
     assert "dependency1" in succeeded_artifacts
-    expected = set([os.path.join(context.config["work_dir"], "cot", "dependency1", "public", f) for f in artifact_filenames])
+    expected = set([os.path.join(artifact_dir, f) for f in artifact_filenames])
     assert set(succeeded_artifacts["dependency1"]) == expected
     assert failed_artifacts == {}
 
tests/test_cot_verify.py (19 additions, 34 deletions)
@@ -828,52 +828,37 @@ async def fake_download(x, y, path):
 
 # download_cot_artifacts {{{1
 @pytest.mark.parametrize(
-    "upstreamArtifacts,expected",
+    "upstreamArtifacts,expected,artifact_prefix",
     (
-        ([{"taskId": "task_id", "paths": ["*"]}], ["foo", "bar", "baz", "live.log", "test.log"]),
-        ([{"taskId": "task_id", "paths": ["*.log"]}], ["live.log", "test.log"]),
+        ([{"taskId": "task_id", "paths": ["*"]}], ["foo", "bar", "baz", "live.log", "test.log"], None),
+        ([{"taskId": "task_id", "paths": ["*.log"]}], ["live.log", "test.log"], None),
+        ([{"taskId": "task_id", "paths": ["public/*"]}], ["public/foo", "public/bar", "public/baz", "public/live.log", "public/test.log"], "public"),
+        ([{"taskId": "task_id", "paths": ["public/*.log"]}], ["public/live.log", "public/test.log"], "public"),
     ),
 )
 @pytest.mark.asyncio
-async def test_download_cot_artifacts_wildcard(chain, mocker, upstreamArtifacts, expected):
+async def test_download_cot_artifacts_wildcard(chain, mocker, upstreamArtifacts, expected, artifact_prefix):
     async def fake_download(x, y, path):
         return path
 
     async def fake_artifacts(*args, **kwargs):
-        return {
-            "artifacts": [
+        artifacts = []
+        for name in ("foo", "bar", "baz", "live.log", "test.log"):
+            if artifact_prefix:
+                artifact_name = f"{artifact_prefix}/{name}"
+            else:
+                artifact_name = name
+
+            artifacts.append(
                 {
                     "storageType": "s3",
-                    "name": "foo",
+                    "name": artifact_name,
                     "expires": "2025-03-01T16:04:04.463Z",
                     "contentType": "text/plain",
-                },
-                {
-                    "storageType": "s3",
-                    "name": "bar",
-                    "expires": "2025-03-01T16:04:04.463Z",
-                    "contentType": "text/plain",
-                },
-                {
-                    "storageType": "s3",
-                    "name": "baz",
-                    "expires": "2025-03-01T16:04:04.463Z",
-                    "contentType": "text/plain",
-                },
-                {
-                    "storageType": "s3",
-                    "name": "live.log",
-                    "expires": "2025-03-01T16:04:04.463Z",
-                    "contentType": "text/plain",
-                },
-                {
-                    "storageType": "s3",
-                    "name": "test.log",
-                    "expires": "2025-03-01T16:04:04.463Z",
-                    "contentType": "text/plain",
-                },
-            ]
-        }
+                }
+            )
+
+        return {"artifacts": artifacts}
 
     chain.task["payload"]["upstreamArtifacts"] = upstreamArtifacts
     mocker.patch.object(cotverify, "download_cot_artifact", new=fake_download)
