Skip to content

Commit

Permalink
fix: checks | docs: include tests and assets
Browse files Browse the repository at this point in the history
  • Loading branch information
Willian Cesar Cincerre Da Silva authored and willianccs committed Jan 16, 2025
1 parent 7e5e724 commit 6524032
Show file tree
Hide file tree
Showing 7 changed files with 158 additions and 62 deletions.
Binary file added resilience4j/assets/resilience4j.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
6 changes: 3 additions & 3 deletions resilience4j/assets/service_checks.json
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
{
"agent_version": "7.59.0",
"integration": "resilience4j",
"check": "resilience4j.prometheus.health",
"statuses": [
"ok",
"critical"
Expand All @@ -11,7 +11,7 @@
"host",
"endpoint"
],
"name": "Resilience4j endpoint health",
"description": "Returns `CRITICAL` if the Agent is unable to connect to the Resilience4j endpoint, otherwise returns `OK`."
}
]
39 changes: 39 additions & 0 deletions resilience4j/datadog_checks/resilience4j/resilience4j.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
from datadog_checks.base import OpenMetricsBaseCheck, ConfigurationError

from .metrics import METRIC_MAP


class Resilience4jCheck(OpenMetricsBaseCheck):
    """OpenMetrics-based check that scrapes a Resilience4j Prometheus endpoint.

    Each configured instance must provide a ``prometheus_url`` pointing at the
    application's metrics endpoint (e.g. ``.../actuator/prometheus``).
    """

    # Do not cap the number of metrics collected per run.
    DEFAULT_METRIC_LIMIT = 0

    def __init__(self, name, init_config, instances):
        # Defaults merged into every instance under the 'resilience4j' namespace.
        default_instances = {
            'resilience4j': {
                'metrics': [METRIC_MAP],
                # Use real booleans, not the string 'true': any non-empty string
                # is truthy, so passing strings only works by accident of
                # coercion in the base class.
                'send_distribution_sums_as_monotonic': True,
                'send_distribution_counts_as_monotonic': True,
            }
        }

        super(Resilience4jCheck, self).__init__(
            name, init_config, instances, default_instances=default_instances, default_namespace='resilience4j'
        )

    def _http_check(self, url, check_name):
        """Probe ``url`` and emit ``check_name`` as a service check.

        CRITICAL on connection errors or HTTP error statuses (raise_for_status),
        OK on a 200 response, WARNING on any other non-error status.

        NOTE(review): currently unreferenced -- ``check()`` relies on the base
        class's built-in endpoint-health service check instead. Either wire
        this in or remove it.
        """
        try:
            response = self.http.get(url)
            response.raise_for_status()
        except Exception as e:
            self.service_check(check_name, self.CRITICAL, message=str(e))
        else:
            if response.status_code == 200:
                self.service_check(check_name, self.OK)
            else:
                self.service_check(check_name, self.WARNING)

    def check(self, instance):
        """Validate the instance config and delegate scraping to the base class.

        Raises:
            ConfigurationError: if the instance has no ``prometheus_url``.
        """
        prometheus_url = instance.get("prometheus_url")
        if prometheus_url is None:
            raise ConfigurationError("Each instance must have a url to the metrics endpoint")

        super(Resilience4jCheck, self).check(instance)
37 changes: 14 additions & 23 deletions resilience4j/tests/conftest.py
Original file line number Diff line number Diff line change
@@ -1,20 +1,29 @@
import os
from unittest import mock

import mock
import pytest

from datadog_checks.dev import docker_run, get_here
from datadog_checks.resilience4j.check import Resilience4jCheck

INSTANCE_URL="http://localhost:9080/actuator/prometheus"
HERE = get_here()
DOCKER_DIR = os.path.join(HERE, 'docker')

@pytest.fixture(scope="session")

@pytest.fixture(scope='session')
def dd_environment():
    """Spin up the Resilience4j demo stack for e2e tests.

    Waits until the container logs 'Successfully started Resilience4j!' and
    yields the Agent instance config; the stack is torn down on session exit.
    """
    compose_file = os.path.join(DOCKER_DIR, 'docker-compose.yaml')

    # Yield inside the context manager so the containers stay up for the
    # whole test session (a bare `yield` before docker_run would hand the
    # Agent an empty environment).
    with docker_run(compose_file, log_patterns=[r'Successfully started Resilience4j!']):
        yield {'instances': [{'prometheus_url': INSTANCE_URL}]}


@pytest.fixture
def instance():
    """Minimal instance config pointing at the local demo's metrics endpoint."""
    # Single source of truth: reuse INSTANCE_URL rather than a second,
    # stale hard-coded URL.
    return {
        "prometheus_url": INSTANCE_URL,
    }


Expand All @@ -23,24 +32,6 @@ def check(instance):
return Resilience4jCheck("resilience4j", {}, [instance])


# @pytest.fixture()
# def mock_micrometer_metrics():
# fixture_file = os.path.join(os.path.dirname(__file__), "fixtures", "metrics-micrometer.txt")

# with open(fixture_file, "r") as f:
# content = f.read()

# with mock.patch(
# "requests.get",
# return_value=mock.MagicMock(
# status_code=200,
# iter_lines=lambda **kwargs: content.split("\n"),
# headers={"Content-Type": "text/plain"},
# ),
# ):
# yield


@pytest.fixture()
def mock_prometheus_metrics():
fixture_file = os.path.join(os.path.dirname(__file__), "fixtures", "metrics-prometheus.txt")
Expand All @@ -56,4 +47,4 @@ def mock_prometheus_metrics():
headers={"Content-Type": "text/plain"},
),
):
yield
yield
59 changes: 59 additions & 0 deletions resilience4j/tests/conftest_local.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
import os
from unittest import mock

import pytest

from datadog_checks.resilience4j.check import Resilience4jCheck


@pytest.fixture(scope="session")
def dd_environment():
    """No-op environment fixture: the local unit tests need no live backend."""
    yield None


@pytest.fixture
def instance():
    """Static instance config consumed by the mocked unit tests."""
    config = {"prometheus_url": "http://localhost:3000/metrics"}
    return config


@pytest.fixture
def check(instance):
    """Build a Resilience4jCheck wired to the single test instance."""
    instances = [instance]
    return Resilience4jCheck("resilience4j", {}, instances)


# @pytest.fixture()
# def mock_micrometer_metrics():
# fixture_file = os.path.join(os.path.dirname(__file__), "fixtures", "metrics-micrometer.txt")

# with open(fixture_file, "r") as f:
# content = f.read()

# with mock.patch(
# "requests.get",
# return_value=mock.MagicMock(
# status_code=200,
# iter_lines=lambda **kwargs: content.split("\n"),
# headers={"Content-Type": "text/plain"},
# ),
# ):
# yield


@pytest.fixture()
def mock_prometheus_metrics():
    """Patch ``requests.get`` to serve the canned Prometheus payload.

    The fake response streams the fixture file line by line with a 200
    status and a plain-text content type, mimicking a real scrape.
    """
    fixture_file = os.path.join(os.path.dirname(__file__), "fixtures", "metrics-prometheus.txt")

    with open(fixture_file, "r") as f:
        content = f.read()

    fake_response = mock.MagicMock(
        status_code=200,
        iter_lines=lambda **kwargs: content.split("\n"),
        headers={"Content-Type": "text/plain"},
    )
    with mock.patch("requests.get", return_value=fake_response):
        yield
39 changes: 39 additions & 0 deletions resilience4j/tests/docker/docker-compose.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
version: "3"

# Simple service demonstrating the use of resilience4j
services:
  # Spring Boot demo application exposing Resilience4j metrics.
  resilience4j-demo:
    container_name: resilience4j
    # Image pinned by digest so test runs are reproducible.
    image: ghcr.io/willianccs/sample-demo-resilience4j-1.0@sha256:f2c814df19ff41eaf2e0f92ea0b5806cb1e6b9869db62d791ab2f51a3d2a0a84
    ports:
      # Application / actuator port used by the check's prometheus_url.
      - "9080:9080"
    environment:
      - SPRING_PROFILES_ACTIVE=docker
    healthcheck:
      # Healthy once the actuator health endpoint answers.
      test: curl --fail http://resilience4j-demo:9080/actuator/health || exit 1
      interval: 40s
      timeout: 30s
      retries: 3
      # Allow the JVM time to boot before probing.
      start_period: 60s

  # Traffic generator: hits success/failure/fallback endpoints so the demo
  # emits non-empty Resilience4j metrics for the tests to scrape.
  tester:
    image: alpine
    depends_on:
      - resilience4j-demo
    command: >
      sh -c "
      apk add --no-cache curl &&
      echo 'Requests Success' &&
      curl -s http://resilience4j-demo:9080/backendA/success &&
      echo 'Requests Fail' &&
      for i in 1 2 3 4; do
      curl -s http://resilience4j-demo:9080/backendA/failure;
      done &&
      echo 'Validate Fallback' &&
      curl -s http://resilience4j-demo:9080/backendA/fallback &&
      echo 'Circuitbreaker closed' &&
      for i in 1 2 3; do
      curl -s http://resilience4j-demo:9080/backendA/success;
      done
      "
    restart: always
40 changes: 4 additions & 36 deletions resilience4j/tests/test_resilience4j.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,27 +3,6 @@
from datadog_checks.dev.utils import get_metadata_metrics
from datadog_checks.resilience4j import Resilience4jCheck

# EXPECTED_MICROMETER_METRICS = {
# "resilience4j.bulkhead.available.concurrent.calls",
# "resilience4j.bulkhead.max.allowed.concurrent.calls",
# "resilience4j.bulkhead.max.thread.pool.size",
# "resilience4j.bulkhead.queue.capacity",
# "resilience4j.bulkhead.queue.depth",
# "resilience4j.bulkhead.thread.pool.size",
# "resilience4j.circuitbreaker.buffered.calls",
# "resilience4j.circuitbreaker.calls.seconds.count",
# "resilience4j.circuitbreaker.calls.seconds.max",
# "resilience4j.circuitbreaker.calls.seconds.sum",
# # "resilience4j.circuitbreaker.calls",
# "resilience4j.circuitbreaker.failure.rate",
# "resilience4j.circuitbreaker.max.buffered.calls",
# "resilience4j.circuitbreaker.state",
# "resilience4j.ratelimiter.available.permissions",
# "resilience4j.ratelimiter.waiting.threads",
# "resilience4j.retry.calls",
# "resilience4j.timelimiter.calls.count",
# }

EXPECTED_PROMETHEUS_METRICS = {
"resilience4j.bulkhead.available.concurrent.calls",
"resilience4j.bulkhead.max.allowed.concurrent.calls",
Expand All @@ -45,22 +24,11 @@
"resilience4j.timelimiter.calls",
}


# @pytest.mark.unit
# def test_mock_assert_micrometer_metrics(dd_run_check, aggregator, check, mock_micrometer_metrics):
# dd_run_check(check)
# for metric_name in EXPECTED_MICROMETER_METRICS:
# aggregator.assert_metric(metric_name)
# aggregator.assert_all_metrics_covered()
# aggregator.assert_metrics_using_metadata(get_metadata_metrics())
# aggregator.assert_service_check("resilience4j.openmetrics.health", status=Resilience4jCheck.OK)


@pytest.mark.integration
def test_check(dd_run_check, aggregator, check):
    """Run the check against the live docker environment.

    Uses at_least=0 per metric because a freshly started demo may not have
    emitted every series yet; coverage and metadata are still enforced.
    """
    dd_run_check(check)
    for metric_name in EXPECTED_PROMETHEUS_METRICS:
        aggregator.assert_metric(metric_name, at_least=0)
    aggregator.assert_all_metrics_covered()
    aggregator.assert_metrics_using_metadata(get_metadata_metrics())
    # Service check name must match assets/service_checks.json.
    aggregator.assert_service_check("resilience4j.prometheus.health", status=Resilience4jCheck.OK)

0 comments on commit 6524032

Please sign in to comment.