Skip to content

Commit

Permalink
fix order of tests
Browse files Browse the repository at this point in the history
  • Loading branch information
JSabadin committed Jan 10, 2025
1 parent 26b806e commit 6022e67
Showing 1 changed file with 28 additions and 8 deletions.
36 changes: 28 additions & 8 deletions tests/test_data/test_dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -490,10 +490,19 @@ def generator():
)


@pytest.mark.dependency(name="test_clone_dataset")
def test_clone_dataset(
bucket_storage: BucketStorage, dataset_name: str, tempdir: Path
):
@pytest.mark.dependency(name="test_clone_dataset_local")
def test_clone_dataset_local(dataset_name: str, tempdir: Path):
    """Exercise dataset cloning against local bucket storage.

    Registered under ``test_clone_dataset_local`` so the GCS variant
    (and downstream merge tests) can be ordered after it.
    """
    _test_clone_dataset(
        BucketStorage.LOCAL,
        dataset_name,
        tempdir,
    )


@pytest.mark.dependency(
    name="test_clone_dataset_gcs", depends=["test_clone_dataset_local"]
)
def test_clone_dataset_gcs(dataset_name: str, tempdir: Path):
    """Exercise dataset cloning against GCS bucket storage.

    Runs only after the local clone test has passed, keeping the
    slower remote-storage check at the end of the chain.
    """
    _test_clone_dataset(
        BucketStorage.GCS,
        dataset_name,
        tempdir,
    )


def _test_clone_dataset(bucket_storage, dataset_name: str, tempdir: Path):
dataset = LuxonisDataset(
dataset_name,
bucket_storage=bucket_storage,
Expand Down Expand Up @@ -527,10 +536,21 @@ def generator1():
assert df_cloned.equals(df_original)


@pytest.mark.dependency(depends=["test_clone_dataset"])
def test_merge_datasets(
bucket_storage: BucketStorage, dataset_name: str, tempdir: Path
):
@pytest.mark.dependency(
    name="test_merge_datasets_local", depends=["test_clone_dataset_gcs"]
)
def test_merge_datasets_local(dataset_name: str, tempdir: Path):
    """Exercise dataset merging against local bucket storage.

    Ordered after the clone tests so merge failures are not conflated
    with cloning regressions.
    """
    _test_merge_datasets(
        BucketStorage.LOCAL,
        dataset_name,
        tempdir,
    )


@pytest.mark.dependency(
    name="test_merge_datasets_gcs", depends=["test_merge_datasets_local"]
)
def test_merge_datasets_gcs(dataset_name: str, tempdir: Path):
    """Exercise dataset merging against GCS bucket storage.

    Gated on the local merge test, so the remote variant only runs
    once the same logic has succeeded locally.
    """
    _test_merge_datasets(
        BucketStorage.GCS,
        dataset_name,
        tempdir,
    )


def _test_merge_datasets(bucket_storage, dataset_name: str, tempdir: Path):
dataset1_name = dataset_name + "_1"
dataset1 = LuxonisDataset(
dataset1_name,
Expand Down

0 comments on commit 6022e67

Please sign in to comment.