Skip to content

Commit

Permalink
test: run tests in groups to avoid version issues (#95)
Browse files Browse the repository at this point in the history
  • Loading branch information
avik-pal authored Jan 8, 2025
1 parent 58b1ad3 commit 564fc0c
Show file tree
Hide file tree
Showing 8 changed files with 89 additions and 39 deletions.
20 changes: 16 additions & 4 deletions .buildkite/testing.yml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
steps:
- group: ":julia: CUDA GPU"
steps:
- label: ":julia: Julia {{matrix.julia}} + CUDA GPU"
- label: ":julia: Julia {{matrix.julia}} + CUDA GPU + {{matrix.group}}"
plugins:
- JuliaCI/julia#v1:
version: "{{matrix.julia}}"
Expand All @@ -17,17 +17,23 @@ steps:
cuda: "*"
env:
BACKEND_GROUP: "CUDA"
BOLTZ_TEST_GROUP: "{{matrix.group}}"
if: build.message !~ /\[skip tests\]/ && build.message !~ /\[skip ci\]/
timeout_in_minutes: 60
matrix:
setup:
julia:
- "1.10"
- "1"
group:
- "layers"
- "others"
- "vision"
- "vision_metalhead"
- "integration"

- group: ":julia: AMD GPU"
steps:
- label: ":julia: Julia: {{matrix.julia}} + AMD GPU"
- label: ":julia: Julia: {{matrix.julia}} + AMD GPU + {{matrix.group}}"
plugins:
- JuliaCI/julia#v1:
version: "{{matrix.julia}}"
Expand All @@ -43,6 +49,7 @@ steps:
JULIA_AMDGPU_HIP_MUST_LOAD: "1"
JULIA_AMDGPU_DISABLE_ARTIFACTS: "1"
BACKEND_GROUP: "AMDGPU"
BOLTZ_TEST_GROUP: "{{matrix.group}}"
agents:
queue: "juliagpu"
rocm: "*"
Expand All @@ -52,8 +59,13 @@ steps:
matrix:
setup:
julia:
- "1.10"
- "1"
group:
- "layers"
- "others"
- "vision"
- "vision_metalhead"
- "integration"

env:
SECRET_CODECOV_TOKEN: "gZlC/IAmeUJehhP5mP2QuUV5a1qV61cvo4PUCLkA9vVkt3x6wgD6fTZmCm+f+gHkmkssFxX+q2h1Ud00XXc75H2LrjyR/cDTIthcO46BBOidYocv/U0gfhp6uT2IZ9fi+ryFfTVVpZ0RIUGmDTj0O/b5qt4oaTriAArLAq6mMipbIR9YCz7ZD/hWQXx8oDeAbnDpwQaddwPyhJhz95nayknOpuJj+ClaVOxgsLGZc3ZWiTj1QxkXBNwxLD2ALeG16Qxs9h7eK87sdcbWeTihvJ6OooARgpoVJAa2pJCFYOGy4Bh07c0VTZmicN2M3GIi74Y5T1PWNaz7nGeANO5Pow==;U2FsdGVkX1843DHkbGWCV9PArLBw0rNqmdy56VOTRNTifBSpkC796Oez1lMFU+yDtkElbcrRSIlS5hRFqpsaFA=="
16 changes: 16 additions & 0 deletions .github/workflows/CI.yml
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,12 @@ jobs:
- "1"
os:
- ubuntu-latest
group:
- "layers"
- "others"
- "vision"
- "vision_metalhead"
- "integration"
steps:
- uses: actions/checkout@v4
- uses: julia-actions/setup-julia@v2
Expand All @@ -48,6 +54,8 @@ jobs:
${{ runner.os }}-
- uses: julia-actions/julia-buildpkg@v1
- uses: julia-actions/julia-runtest@v1
env:
BOLTZ_TEST_GROUP: ${{ matrix.group }}
- uses: julia-actions/julia-processcoverage@v1
with:
directories: src,ext
Expand All @@ -66,6 +74,12 @@ jobs:
matrix:
version:
- "1.10"
group:
- "layers"
- "others"
- "vision"
- "vision_metalhead"
- "integration"
steps:
- uses: actions/checkout@v4
- uses: julia-actions/setup-julia@v2
Expand All @@ -74,6 +88,8 @@ jobs:
- uses: julia-actions/julia-downgrade-compat@v1
- uses: julia-actions/julia-buildpkg@v1
- uses: julia-actions/julia-runtest@v1
env:
BOLTZ_TEST_GROUP: ${{ matrix.group }}
- uses: julia-actions/julia-processcoverage@v1
with:
directories: src,ext
Expand Down
12 changes: 0 additions & 12 deletions test/Project.toml
Original file line number Diff line number Diff line change
@@ -1,23 +1,17 @@
[deps]
Aqua = "4c88cf16-eb10-579e-8560-4a9242c79595"
Bumper = "8ce10254-0962-460f-a3d8-1f77fea1446e"
ComponentArrays = "b0b7db55-cfe3-40fc-9ded-d10e2dbeff66"
DataInterpolations = "82cc6244-b520-54b8-b5a6-8a565e85f1d0"
Downloads = "f43a241f-c20a-4ad4-852c-f6b1247861c6"
DynamicExpressions = "a40a106e-89c9-4ca8-8020-a735e8728b6b"
Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9"
ExplicitImports = "7d51a73a-1435-4ff3-83d9-f097790105c7"
ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
GPUArraysCore = "46192b85-c4d5-4398-a991-12ede77f4527"
Hwloc = "0e44f5e4-bd66-52a0-8798-143a42290a1d"
InteractiveUtils = "b77e0a4c-d291-57a0-90e8-8db25a27a240"
JLD2 = "033835bb-8acc-5ee8-8aae-3f567f8a3819"
LoopVectorization = "bdcacae8-1622-11e9-2a5c-532679323890"
Lux = "b2108857-7c20-44ae-9111-449ecde12c47"
LuxLib = "82251201-b29d-42c6-8e01-566dec8acb11"
LuxTestUtils = "ac9de150-d08f-4546-94fb-7472b5760531"
MLDataDevices = "7e8f7934-dd98-4c1a-8fe8-92b47a384d40"
Metalhead = "dbeba491-748d-5e0e-a39e-b530a07fa0cc"
NNlib = "872c559c-99b0-510c-b3b7-b6c96a88d5cd"
Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
Expand All @@ -29,24 +23,18 @@ Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"

[compat]
Aqua = "0.8.7"
Bumper = "0.6, 0.7"
ComponentArrays = "0.15.16"
DataInterpolations = "6.4"
Downloads = "1.6"
DynamicExpressions = "1"
Enzyme = "0.13"
ExplicitImports = "1.9.0"
ForwardDiff = "0.10.36"
GPUArraysCore = "0.1.6, 0.2"
Hwloc = "3.2.0"
InteractiveUtils = "<0.0.1, 1"
JLD2 = "0.5"
LoopVectorization = "0.12.171"
Lux = "1"
LuxLib = "1"
LuxTestUtils = "1.1.2"
MLDataDevices = "1"
Metalhead = "0.9"
NNlib = "0.9.21"
Pkg = "1.10"
Random = "1.10"
Expand Down
4 changes: 2 additions & 2 deletions test/layer_tests.jl
Original file line number Diff line number Diff line change
Expand Up @@ -151,7 +151,7 @@ end
end
end

@testitem "Spline Layer" setup=[SharedTestSetup] tags=[:layers] begin
@testitem "Spline Layer" setup=[SharedTestSetup] tags=[:integration] begin
using ComponentArrays, DataInterpolations, ForwardDiff, Zygote, MLDataDevices

@testset "$(mode)" for (mode, aType, dev, ongpu) in MODES
Expand Down Expand Up @@ -222,7 +222,7 @@ end
end
end

@testitem "Dynamic Expressions Layer" setup=[SharedTestSetup] tags=[:layers] begin
@testitem "Dynamic Expressions Layer" setup=[SharedTestSetup] tags=[:integration] begin
using DynamicExpressions, ForwardDiff, ComponentArrays, Bumper, LoopVectorization

operators = OperatorEnum(; binary_operators=[+, -, *], unary_operators=[cos])
Expand Down
2 changes: 1 addition & 1 deletion test/qa_tests.jl
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
end

@testitem "Explicit Imports: Quality Assurance" tags=[:others] begin
import Lux, Metalhead, Zygote # Load all trigger packages
import Lux, Zygote # Load all trigger packages
using ExplicitImports

@test check_no_implicit_imports(Boltz; skip=(Base, Core, Lux)) === nothing
Expand Down
45 changes: 33 additions & 12 deletions test/runtests.jl
Original file line number Diff line number Diff line change
@@ -1,10 +1,27 @@
using ReTestItems, Pkg, InteractiveUtils, Hwloc
using ReTestItems, Pkg, Hwloc, Test

@info sprint(versioninfo)
const ALL_BOTLZ_TEST_GROUPS = [
"layers", "others", "vision", "vision_metalhead", "integration"]

INPUT_TEST_GROUP = lowercase(get(ENV, "BOLTZ_TEST_GROUP", "all"))
const BOLTZ_TEST_GROUP = if startswith("!", INPUT_TEST_GROUP[1])
exclude_group = lowercase.(split(INPUT_TEST_GROUP[2:end], ","))
    filter(x -> x ∉ exclude_group, ALL_BOTLZ_TEST_GROUPS)
else
[INPUT_TEST_GROUP]
end

const BACKEND_GROUP = lowercase(get(ENV, "BACKEND_GROUP", "all"))
const EXTRA_PKGS = String[]

if "all" ∈ BOLTZ_TEST_GROUP || "integration" ∈ BOLTZ_TEST_GROUP
append!(EXTRA_PKGS,
["DataInterpolations", "DynamicExpressions", "Bumper", "LoopVectorization"])
end
if "all" ∈ BOLTZ_TEST_GROUP || "vision_metalhead" ∈ BOLTZ_TEST_GROUP
push!(EXTRA_PKGS, "Metalhead")
end

(BACKEND_GROUP == "all" || BACKEND_GROUP == "cuda") && push!(EXTRA_PKGS, "LuxCUDA")
(BACKEND_GROUP == "all" || BACKEND_GROUP == "amdgpu") && push!(EXTRA_PKGS, "AMDGPU")

Expand All @@ -18,16 +35,20 @@ end

using Boltz

const BOLTZ_TEST_GROUP = get(ENV, "BOLTZ_TEST_GROUP", "all")
const RETESTITEMS_NWORKERS = parse(
Int, get(ENV, "RETESTITEMS_NWORKERS", string(min(Hwloc.num_physical_cores(), 4))))
const RETESTITEMS_NWORKER_THREADS = parse(Int,
get(ENV, "RETESTITEMS_NWORKER_THREADS",
string(max(Hwloc.num_virtual_cores() ÷ RETESTITEMS_NWORKERS, 1))))

@info "Running tests for group: $BOLTZ_TEST_GROUP with $RETESTITEMS_NWORKERS workers"

ReTestItems.runtests(
Boltz; tags=(BOLTZ_TEST_GROUP == "all" ? nothing : [Symbol(BOLTZ_TEST_GROUP)]),
    nworkers=ifelse(BACKEND_GROUP ∈ ("cuda", "amdgpu"), 0, RETESTITEMS_NWORKERS),
nworker_threads=RETESTITEMS_NWORKER_THREADS, testitem_timeout=3600)
@testset "Boltz.jl Tests" begin
@testset "[$(tag)] [$(i)/$(length(BOLTZ_TEST_GROUP))]" for (i, tag) in enumerate(BOLTZ_TEST_GROUP)
nworkers = ifelse(
            BACKEND_GROUP ∈ ("cuda", "amdgpu") &&
(tag == "vision" || tag == "vision_metalhead"),
0, RETESTITEMS_NWORKERS)
nworker_threads = parse(Int,
get(ENV, "RETESTITEMS_NWORKER_THREADS",
string(max(Hwloc.num_virtual_cores() ÷ max(nworkers, 1), 1))))

ReTestItems.runtests(Boltz; tags=(tag == "all" ? nothing : [Symbol(tag)]),
testitem_timeout=2400, nworkers, nworker_threads)
end
end
1 change: 0 additions & 1 deletion test/shared_testsetup.jl
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@
import Reexport: @reexport
@reexport using Boltz, Lux, GPUArraysCore, LuxLib, LuxTestUtils, Random, StableRNGs
using MLDataDevices, JLD2
import Metalhead

LuxTestUtils.jet_target_modules!(["Boltz", "Lux", "LuxLib"])

Expand Down
28 changes: 21 additions & 7 deletions test/vision_tests.jl
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,9 @@ end
end
end

@testitem "ConvMixer" setup=[SharedTestSetup] tags=[:vision] begin
@testitem "ConvMixer" setup=[SharedTestSetup] tags=[:vision_metalhead] begin
import Metalhead

for (mode, aType, dev, ongpu) in MODES, name in [:small, :base, :large]
model = Vision.ConvMixer(name; pretrained=false)
ps, st = Lux.setup(Random.default_rng(), model) |> dev
Expand All @@ -66,7 +68,9 @@ end
end
end

@testitem "GoogLeNet" setup=[SharedTestSetup] tags=[:vision] begin
@testitem "GoogLeNet" setup=[SharedTestSetup] tags=[:vision_metalhead] begin
import Metalhead

for (mode, aType, dev, ongpu) in MODES
model = Vision.GoogLeNet(; pretrained=false)
ps, st = Lux.setup(Random.default_rng(), model) |> dev
Expand All @@ -80,7 +84,9 @@ end
end
end

@testitem "MobileNet" setup=[SharedTestSetup] tags=[:vision] begin
@testitem "MobileNet" setup=[SharedTestSetup] tags=[:vision_metalhead] begin
import Metalhead

for (mode, aType, dev, ongpu) in MODES, name in [:v1, :v2, :v3_small, :v3_large]
model = Vision.MobileNet(name; pretrained=false)
ps, st = Lux.setup(Random.default_rng(), model) |> dev
Expand All @@ -94,7 +100,9 @@ end
end
end

@testitem "ResNet" setup=[SharedTestSetup, PretrainedWeightsTestSetup] tags=[:vision] begin
@testitem "ResNet" setup=[SharedTestSetup, PretrainedWeightsTestSetup] tags=[:vision_metalhead] begin
import Metalhead

for (mode, aType, dev, ongpu) in MODES, depth in [18, 34, 50, 101, 152]
@testset for pretrained in [false, true]
model = Vision.ResNet(depth; pretrained)
Expand All @@ -114,7 +122,9 @@ end
end
end

@testitem "ResNeXt" setup=[SharedTestSetup, PretrainedWeightsTestSetup] tags=[:vision] begin
@testitem "ResNeXt" setup=[SharedTestSetup, PretrainedWeightsTestSetup] tags=[:vision_metalhead] begin
import Metalhead

for (mode, aType, dev, ongpu) in MODES
@testset for (depth, cardinality, base_width) in [
(50, 32, 4), (101, 32, 8), (101, 64, 4), (152, 64, 4)]
Expand All @@ -139,7 +149,9 @@ end
end
end

@testitem "WideResNet" setup=[SharedTestSetup, PretrainedWeightsTestSetup] tags=[:vision] begin
@testitem "WideResNet" setup=[SharedTestSetup, PretrainedWeightsTestSetup] tags=[:vision_metalhead] begin
import Metalhead

for (mode, aType, dev, ongpu) in MODES, depth in [50, 101, 152]
@testset for pretrained in [false, true]
depth == 152 && pretrained && continue
Expand All @@ -161,7 +173,9 @@ end
end
end

@testitem "SqueezeNet" setup=[SharedTestSetup, PretrainedWeightsTestSetup] tags=[:vision] begin
@testitem "SqueezeNet" setup=[SharedTestSetup, PretrainedWeightsTestSetup] tags=[:vision_metalhead] begin
import Metalhead

for (mode, aType, dev, ongpu) in MODES
@testset for pretrained in [false, true]
model = Vision.SqueezeNet(; pretrained)
Expand Down

0 comments on commit 564fc0c

Please sign in to comment.