Skip to content

Commit

Permalink
Initial solution of Integration tests control. (#185)
Browse files Browse the repository at this point in the history
* Initial solution of Integration tests control.
- 'sbt test' now does not run Integration tests
- To run IT or DB tests, the extra command aliases defined in .sbtrc need to be called
  • Loading branch information
miroslavpojer authored May 23, 2024
1 parent 8adf2b3 commit cc8892e
Show file tree
Hide file tree
Showing 43 changed files with 221 additions and 103 deletions.
66 changes: 55 additions & 11 deletions .github/workflows/build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -25,9 +25,6 @@ on:
jobs:
build-agent-and-model:
runs-on: ubuntu-latest
strategy:
matrix:
scala: [2.11.12, 2.12.18, 2.13.11]
steps:
- name: Checkout code
uses: actions/checkout@v2
Expand All @@ -38,10 +35,25 @@ jobs:
with:
java-version: "[email protected]"

- name: Build and run tests
run: sbt "project agent" ++${{matrix.scala}} test doc "project model" ++${{matrix.scala}} test doc
- name: Build and run unit tests
run: sbt "project agent" test doc "project model" test doc

build-server:
build-server-unit:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
- uses: coursier/cache-action@v5

- name: Setup Scala
uses: olafurpg/setup-scala@v14
with:
java-version: "[email protected]"

- name: Build and run unit tests
run: sbt "project server" test

build-server-integration:
runs-on: ubuntu-latest
services:
postgres:
Expand All @@ -56,9 +68,6 @@ jobs:
--health-retries 5
ports:
- 5432:5432
strategy:
matrix:
scala: [2.13.11]
steps:
- name: Checkout code
uses: actions/checkout@v2
Expand All @@ -72,5 +81,40 @@ jobs:
- name: Prepare testing database
run: sbt flywayMigrate

- name: Build and run tests
run: sbt "project server" ++${{matrix.scala}} test doc
- name: Build and run integration tests
run: sbt "project server" testIT doc

build-database:
runs-on: ubuntu-latest
services:
postgres:
image: postgres:15
env:
POSTGRES_PASSWORD: postgres
POSTGRES_DB: atum_db
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
steps:
- name: Checkout code
uses: actions/checkout@v2
- uses: coursier/cache-action@v5

- name: Setup Scala
uses: olafurpg/setup-scala@v14
with:
java-version: "[email protected]"

# Fails on balta dbConnection error issue: https://github.com/AbsaOSS/balta/issues/23
# - name: Build and run unit tests
# run: sbt "project database" test

- name: Prepare testing database
run: sbt flywayMigrate

- name: Build and run database/integration tests
run: sbt testDB doc
17 changes: 8 additions & 9 deletions .github/workflows/jacoco_check.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,9 +22,8 @@ on:
types: [ opened, edited, synchronize, reopened ]

env:
scalaLong12: 2.13.11
scalaShort12: "2.13"
jaCocoReportVersion: "v1.6.1"
scalaLong: 2.13.11
scalaShort: "2.13"
overall: 60.0
changed: 80.0

Expand All @@ -50,27 +49,27 @@ jobs:
- name: Add coverage to PR
if: steps.jacocorun.outcome == 'success'
id: jacoco-agent
uses: madrapps/jacoco-report@${{ env.jaCocoReportVersion }}
uses: madrapps/jacoco-report@v1.6.1
with:
name: agent-jacoco-report
paths: ${{ github.workspace }}/agent/target/jvm-${{ env.scalaShort12 }}/jacoco/report/jacoco.xml
paths: ${{ github.workspace }}/agent/target/jvm-${{ env.scalaShort }}/jacoco/report/jacoco.xml
token: ${{ secrets.GITHUB_TOKEN }}
min-coverage-overall: ${{ env.overall }}
min-coverage-changed-file: ${{ env.changed }}
title: JaCoCo agent module code coverage report - scala ${{ env.scalaLong12 }}
title: JaCoCo agent module code coverage report - scala ${{ env.scalaLong }}
update-comment: true
# model module code coverage
- name: Add coverage to PR
if: steps.jacocorun.outcome == 'success'
id: jacoco-model
uses: madrapps/jacoco-report@${{ env.jaCocoReportVersion }}
uses: madrapps/jacoco-report@v1.6.1
with:
name: model-jacoco-report
paths: ${{ github.workspace }}/model/target/jvm-${{ env.scalaShort12 }}/jacoco/report/jacoco.xml
paths: ${{ github.workspace }}/model/target/jvm-${{ env.scalaShort }}/jacoco/report/jacoco.xml
token: ${{ secrets.GITHUB_TOKEN }}
min-coverage-overall: ${{ env.overall }}
min-coverage-changed-file: ${{ env.changed }}
title: JaCoCo model module code coverage report - scala ${{ env.scalaLong12 }}
title: JaCoCo model module code coverage report - scala ${{ env.scalaLong }}
update-comment: true
- name: Get the Coverage info
if: steps.jacocorun.outcome == 'success'
Expand Down
10 changes: 5 additions & 5 deletions .github/workflows/jacoco_check_server.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,8 @@ on:
types: [ opened, edited, synchronize, reopened ]

env:
scalaLong13: 2.13.11
scalaShort13: "2.13"
scalaLong: 2.13.11
scalaShort: "2.13"
overall: 75.0
changed: 75.0

Expand Down Expand Up @@ -58,18 +58,18 @@ jobs:
- name: Build and run tests
continue-on-error: true
id: jacocorun
run: sbt "project server; jacoco"
run: sbt "project server" jacoco
# server module code coverage
- name: Add coverage to PR
if: steps.jacocorun.outcome == 'success'
id: jacoco-server
uses: madrapps/[email protected]
with:
paths: ${{ github.workspace }}/server/target/jvm-${{ env.scalaShort13 }}/jacoco/report/jacoco.xml
paths: ${{ github.workspace }}/server/target/jvm-${{ env.scalaShort }}/jacoco/report/jacoco.xml
token: ${{ secrets.GITHUB_TOKEN }}
min-coverage-overall: ${{env.overall }}
min-coverage-changed-files: ${{ env.changed }}
title: JaCoCo server module code coverage report - scala ${{ env.scalaLong13 }}
title: JaCoCo server module code coverage report - scala ${{ env.scalaLong }}
update-comment: true
- name: Get the Coverage info
if: steps.jacocorun.outcome == 'success'
Expand Down
29 changes: 29 additions & 0 deletions .sbtrc
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
#
# Copyright 2021 ABSA Group Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Aliases in this file expect usage of the following test file naming conventions:
# - "UnitTests" suffix for test files and Suites which define unit tests
# - "IntegrationTests" suffix for test files and Suites which define integration tests

# CPS QA types aliases
# * Unit tests
alias test=; testOnly *UnitTests

# * Integration tests
alias testIT=; testOnly *IntegrationTests

# Project specific aliases
alias testDB=; project database; testOnly *
50 changes: 48 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,27 @@
# Atum Service

- [Atum Service](#atum-service)
- [Modules](#modules)
- [Agent `agent/`](#agent-agent)
- [Server `server/`](#server-server)
- [Data Model `model/`](#data-model-model)
- [Database `database/`](#database-database)
- [Vocabulary](#vocabulary)
- [Atum Agent](#atum-agent)
- [Partitioning](#partitioning)
- [Atum Context](#atum-context)
- [Measure](#measure)
- [Measurement](#measurement)
- [Checkpoint](#checkpoint)
- [Data Flow](#data-flow)
- [How to generate Code coverage report](#how-to-generate-code-coverage-report)
- [How to Run in IntelliJ](#how-to-run-in-intellij)
- [How to Run Tests](#how-to-run-tests)
- [Test controls](#test-controls)
- [Run Unit Tests](#run-unit-tests)
- [Run Integration Tests](#run-integration-tests)
- [How to Release](#how-to-release)

Atum Service is a data completeness and accuracy application meant to be used for data processed by Apache Spark.

One of the challenges regulated industries face is the requirement to track and prove that their systems preserve
Expand Down Expand Up @@ -130,10 +152,10 @@ even if it involves multiple applications or ETL pipelines.
```sbt
sbt jacoco
```

Code coverage will be generated on path:
```
{project-root}/atum-service/target/spark{spark_version}-jvm-{scala_version}/jacoco/report/html
{project-root}/atum-service-test/target/jvm-{scala_version}/jacoco/report/html
{project-root}/{module}/target/jvm-{scala_version}/jacoco/report/html
```

## How to Run in IntelliJ
Expand All @@ -142,6 +164,30 @@ To make this project runnable via IntelliJ, do the following:
- Make sure that your configuration in `server/src/main/resources/reference.conf`
is configured according to your needs

## How to Run Tests

### Test controls

See the commands configured in the `.sbtrc` [(link)](https://www.scala-sbt.org/1.x/docs/Best-Practices.html#.sbtrc) file to provide different testing profiles.

### Run Unit Tests
Use the `test` command to execute all unit tests, skipping all other types of tests.
```
sbt test
```

### Run Integration Tests
Use the `testIT` command to execute all Integration tests, skipping all other test types.
```
sbt testIT
```

Use the `testDB` command to execute all Integration tests in `database` module, skipping all other tests and modules.
- Hint: project custom command
```
sbt testDB
```


## How to Release

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ import org.scalatest.funsuite.AnyFunSuiteLike
import za.co.absa.atum.agent.AtumContext.AtumPartitions
import za.co.absa.atum.agent.dispatcher.{CapturingDispatcher, ConsoleDispatcher, HttpDispatcher}

class AtumAgentTest extends AnyFunSuiteLike {
class AtumAgentUnitTests extends AnyFunSuiteLike {

test("AtumAgent creates AtumContext(s) as expected") {
val atumAgent = AtumAgent
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ import za.co.absa.atum.agent.model.{Measure, MeasureResult, MeasurementBuilder,
import za.co.absa.atum.model.dto.CheckpointDTO
import za.co.absa.atum.model.dto.MeasureResultDTO.ResultValueType

class AtumContextTest extends AnyFlatSpec with Matchers {
class AtumContextUnitTests extends AnyFlatSpec with Matchers {

"withMeasureAddedOrOverwritten" should "add a new measure if not exists, overwrite it otherwise" in {

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ import java.util.UUID
import com.github.dwickern.macros.NameOf._
import za.co.absa.atum.agent.dispatcher.CapturingDispatcher.CapturedCall

class CapturingDispatcherTest extends AnyWordSpec with Matchers {
class CapturingDispatcherUnitTests extends AnyWordSpec with Matchers {
private val DefaultProcessStartTime = ZonedDateTime.parse("2024-03-11T00:00:00Z")

private def config(captureLimit: Int): Config = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ import za.co.absa.atum.agent.model.AtumMeasure._
import za.co.absa.atum.model.dto.MeasureResultDTO.ResultValueType
import za.co.absa.spark.commons.test.SparkTestBase

class AtumMeasureTest extends AnyFlatSpec with Matchers with SparkTestBase { self =>
class AtumMeasureUnitTests extends AnyFlatSpec with Matchers with SparkTestBase { self =>

"Measure" should "be based on the dataframe" in {

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ import za.co.absa.atum.model.dto.MeasureResultDTO.ResultValueType
import za.co.absa.spark.commons.test.SparkTestBase
import za.co.absa.atum.agent.AtumContext._

class MeasureTest extends AnyFlatSpec with Matchers with SparkTestBase { self =>
class MeasureUnitTests extends AnyFlatSpec with Matchers with SparkTestBase { self =>

"Measure" should "be based on the dataframe" in {

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ import za.co.absa.atum.model.dto.{MeasureDTO, MeasureResultDTO, MeasurementDTO}
import za.co.absa.atum.agent.model.AtumMeasure._
import za.co.absa.atum.model.dto.MeasureResultDTO.{ResultValueType, TypedValue}

class MeasurementBuilderTest extends AnyFlatSpec {
class MeasurementBuilderUnitTests extends AnyFlatSpec {

"buildMeasurementDTO" should
"build MeasurementDTO for BigDecimal type of result value when Measure and MeasureResult provided" in {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ import org.scalatest.flatspec.AnyFlatSpecLike
import za.co.absa.atum.agent.model.AtumMeasure._
import za.co.absa.atum.model.dto.MeasureDTO

class MeasuresBuilderTest extends AnyFlatSpecLike {
class MeasuresBuilderUnitTests extends AnyFlatSpecLike {

"mapToMeasures" should "map MeasureDTO into Measure for supported measures" in {
val supportedMeasures = Set(
Expand Down
1 change: 0 additions & 1 deletion build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,6 @@ flywayLocations := FlywayConfiguration.flywayLocations
flywaySqlMigrationSuffixes := FlywayConfiguration.flywaySqlMigrationSuffixes
libraryDependencies ++= flywayDependencies


/**
 * Module `server` is the service application that collects and stores measured data and upon request retrieves them
*/
Expand Down
8 changes: 4 additions & 4 deletions database/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -12,14 +12,14 @@ docker run --name=atum_db -e POSTGRES_PASSWORD=postgres -e POSTGRES_DB=atum_db -
sbt flywayMigrate

# kill & remove docker container (optional; only if using dockerized postgres instance)
docker kill aul_db
docker rm aul_db
docker kill atum_db
docker rm atum_db
```

### Using local postgres instance

- create database `atum_db`
- migrate scripts
```zsh
# migrate scripts
sbt flywayMigrate
```

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ import za.co.absa.balta.DBTestSuite
import scala.util.Random


class CreateFlowTest extends DBTestSuite {
class CreateFlowIntegrationTests extends DBTestSuite {
private val fncCreateFlow = "flows._create_flow"

test("Create flow") {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ import java.util.UUID
import scala.util.Random


class GetFlowCheckpointsTest extends DBTestSuite {
class GetFlowCheckpointsIntegrationTests extends DBTestSuite {
private val fncGetFlowCheckpoints = "flows.get_flow_checkpoints"

case class MeasuredDetails (
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ import za.co.absa.balta.DBTestSuite
import za.co.absa.balta.classes.JsonBString
import za.co.absa.balta.classes.setter.CustomDBType

class CreateOrUpdateAdditionalDataTest extends DBTestSuite{
class CreateOrUpdateAdditionalDataIntegrationTests extends DBTestSuite{

private val fncCreateOrUpdateAdditionalData = "runs.create_or_update_additional_data"

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ package za.co.absa.atum.database.runs
import za.co.absa.balta.DBTestSuite
import za.co.absa.balta.classes.JsonBString

class CreatePartitioningIfNotExistsTest extends DBTestSuite{
class CreatePartitioningIfNotExistsIntegrationTests extends DBTestSuite{

private val fncCreatePartitioningIfNotExists = "runs.create_partitioning_if_not_exists"

Expand Down
Loading

0 comments on commit cc8892e

Please sign in to comment.