Skip to content

Commit

Permalink
Merge branch 'Netflix:master' into master
Browse files Browse the repository at this point in the history
  • Loading branch information
mikansoro authored Sep 9, 2022
2 parents add7812 + d743fcf commit 7481966
Show file tree
Hide file tree
Showing 29 changed files with 1,057 additions and 439 deletions.
10 changes: 6 additions & 4 deletions .github/workflows/build-and-package.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,12 +11,14 @@ on:
jobs:
build:
runs-on: ubuntu-latest
env:
ASYNC_TEST_TIMEOUT: 120
steps:
- uses: actions/checkout@v2
- name: Set up Python 3.8
- name: Set up Python 3.10.5
uses: actions/setup-python@v1
with:
python-version: 3.8
python-version: 3.10.5
- name: Install Terraform
uses: hashicorp/setup-terraform@v1
- name: Install dependencies
Expand Down Expand Up @@ -60,10 +62,10 @@ jobs:
# master builds don't have tags, which breaks setupmeta versioning. This retrieves the tags.
- run: git fetch --prune --unshallow --tags
if: github.ref == 'refs/heads/master'
- name: Set up Python 3.8
- name: Set up Python 3.10.5
uses: actions/setup-python@v1
with:
python-version: 3.8
python-version: 3.10.5
- name: Build UI assets
run: |
curl -sL https://deb.nodesource.com/setup_14.x | sudo bash
Expand Down
2 changes: 1 addition & 1 deletion .isort.cfg
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[settings]
known_first_party=consoleme,consoleme_ecs_cdk
known_third_party = aiozipkin,asgiref,atlassian,aws_cdk,bcrypt,billiard,bleach,boto3,botocore,celery,cfnresponse,click,click_log,cloudaux,cryptography,dateutil,deepdiff,distutils,elasticsearch,email_validator,furl,git,google,googleapiclient,jsonschema,jwt,logmatic,marshmallow,mock,mockredis,moto,nacl,nested_stacks,okta_jwt,onelogin,pandas,parliament,password_strength,pkg_resources,policy_sentry,policyuniverse,pydantic,pytest,pytz,questionary,redis,redislite,requests,retrying,ruamel,sentry_sdk,setuptools,simplejson,tenacity,tornado,ujson,uvloop,validate_email,yaml
known_third_party = aiozipkin,asgiref,atlassian,aws_cdk,bcrypt,billiard,bleach,boto3,botocore,celery,cfnresponse,click,click_log,cloudaux,cryptography,dateutil,deepdiff,distutils,elasticsearch,email_validator,fakeredis,furl,git,google,googleapiclient,jsonschema,jwt,logmatic,marshmallow,mock,mockredis,moto,nacl,nested_stacks,okta_jwt,onelogin,pandas,parliament,password_strength,pkg_resources,policy_sentry,policyuniverse,pydantic,pytest,pytz,questionary,redis,requests,retrying,ruamel,sentry_sdk,setuptools,simplejson,tenacity,tornado,ujson,uvloop,validate_email,yaml
multi_line_output=3
include_trailing_comma=True
balanced_wrapping=True
Expand Down
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ repos:
rev: 22.3.0
hooks:
- id: black
language_version: python3.8
language_version: python3.10

- repo: https://github.com/pre-commit/pygrep-hooks
rev: v1.7.0
Expand Down
2 changes: 1 addition & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
# Dockerfile should instantiate AWS Project with configurable plugins
FROM python:3.8
FROM python:3.10
MAINTAINER Netflix Security
WORKDIR /apps/consoleme
# NODE_OPTIONS needed to increase memory size of Node for the `yarn build` step. The Monaco Editor
Expand Down
2 changes: 1 addition & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ redis:

.PHONY: test
test: clean
ASYNC_TEST_TIMEOUT=60 $(pytest)
ASYNC_TEST_TIMEOUT=60 CONFIG_LOCATION=example_config/example_config_test.yaml $(pytest)

.PHONY: bandit
bandit: clean
Expand Down
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
[![Python 3.8](https://img.shields.io/badge/python-3.8-blue.svg)](https://www.python.org/downloads/release/python-386/)
[![Python 3.10](https://img.shields.io/badge/python-3.10-blue.svg)](https://www.python.org/downloads/release/python-3105/)
[![Discord](https://img.shields.io/discord/730908778299523072?label=Discord&logo=discord&style=flat-square)](https://discord.gg/nQVpNGGkYu)

# ConsoleMe
Expand Down
21 changes: 4 additions & 17 deletions consoleme/celery_tasks/celery_tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,6 @@
from __future__ import absolute_import

import json # We use a separate SetEncoder here so we cannot use ujson
import os
import sys
import time
from datetime import datetime, timedelta
Expand Down Expand Up @@ -117,25 +116,13 @@ def on_configure(self) -> None:
),
backend=config.get(
f"celery.backend.{config.region}",
config.get("celery.broker.global", "redis://127.0.0.1:6379/2"),
config.get("celery.backend.global"),
),
)

if config.get("redis.use_redislite"):
import tempfile

import redislite

redislite_db_path = os.path.join(
config.get("redis.redislite.db_path", tempfile.NamedTemporaryFile().name)
)
redislite_client = redislite.Redis(redislite_db_path)
redislite_socket_path = f"redis+socket://{redislite_client.socket_file}"
app = Celery(
"tasks",
broker=f"{redislite_socket_path}?virtual_host=1",
backend=f"{redislite_socket_path}?virtual_host=2",
)
broker_transport_options = config.get("celery.broker_transport_options")
if broker_transport_options:
app.conf.update({"broker_transport_options": dict(broker_transport_options)})

app.conf.result_expires = config.get("celery.result_expires", 60)
app.conf.worker_prefetch_multiplier = config.get("celery.worker_prefetch_multiplier", 4)
Expand Down
28 changes: 2 additions & 26 deletions consoleme/config/config.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
"""Configuration handling library."""
import collections
import collections.abc
import datetime
import logging
import os
Expand Down Expand Up @@ -35,7 +35,7 @@ def dict_merge(dct: dict, merge_dct: dict):
if (
k in dct
and isinstance(dct[k], dict)
and isinstance(merge_dct[k], collections.Mapping)
and isinstance(merge_dct[k], collections.abc.Mapping)
):
dict_merge(dct[k], merge_dct[k])
else:
Expand Down Expand Up @@ -156,24 +156,6 @@ def __set_flag_on_main_exit(self):
# Main thread exited, signal to other threads
main_exit_flag.set()

def purge_redislite_cache(self):
"""
Purges redislite cache in primary DB periodically. This will force a cache refresh, and it is
convenient for cases where you cannot securely run shared Redis (ie: AWS AppRunner)
"""
if not self.get("redis.use_redislite"):
return
from consoleme.lib.redis import RedisHandler

red = RedisHandler().redis_sync()
while threading.main_thread().is_alive():
red.flushdb()
# Wait till main exit flag is set OR a fixed timeout
if main_exit_flag.wait(
timeout=self.get("redis.purge_redislite_cache_interval", 1800)
):
break

async def merge_extended_paths(self, extends, dir_path):
for s in extends:
extend_config = {}
Expand Down Expand Up @@ -243,10 +225,6 @@ async def load_config(
if allow_start_background_threads:
Timer(0, self.__set_flag_on_main_exit, ()).start()

if allow_start_background_threads and self.get("redis.use_redislite"):
t = Timer(1, self.purge_redislite_cache, ())
t.start()

if allow_start_background_threads and self.get("config.load_from_dynamo", True):
t = Timer(2, self.load_config_from_dynamo_bg_thread, ())
t.start()
Expand Down Expand Up @@ -361,8 +339,6 @@ def set_logging_levels(self):
"spectator.HttpClient": "WARNING",
"spectator.Registry": "WARNING",
"urllib3": "ERROR",
"redislite.client": "WARNING",
"redislite.configuration": "WARNING",
}
for logger, level in self.get("logging_levels", default_logging_levels).items():
logging.getLogger(logger).setLevel(level)
Expand Down
21 changes: 2 additions & 19 deletions consoleme/default_plugins/plugins/celery_tasks/celery_tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@
"""
import json
import os
from datetime import timedelta

from asgiref.sync import async_to_sync
Expand All @@ -26,31 +25,15 @@
),
backend=config.get(
f"celery.backend.{config.region}",
config.get("celery.broker.global", "redis://127.0.0.1:6379/2"),
config.get("celery.backend.global"),
),
)

if config.get("redis.use_redislite"):
import tempfile

import redislite

redislite_db_path = os.path.join(
config.get("redis.redislite.db_path", tempfile.NamedTemporaryFile().name)
)
redislite_client = redislite.Redis(redislite_db_path)
redislite_socket_path = f"redis+socket://{redislite_client.socket_file}"
app = Celery(
"tasks",
broker=f"{redislite_socket_path}?virtual_host=1",
backend=f"{redislite_socket_path}?virtual_host=2",
)

app.conf.result_expires = config.get("celery.result_expires", 60)
app.conf.worker_prefetch_multiplier = config.get("celery.worker_prefetch_multiplier", 4)
app.conf.task_acks_late = config.get("celery.task_acks_late", True)

if config.get("celery.purge") and not config.get("redis.use_redislite"):
if config.get("celery.purge"):
# Useful to clear celery queue in development
with Timeout(seconds=5, error_message="Timeout: Are you sure Redis is running?"):
app.control.purge()
Expand Down
4 changes: 2 additions & 2 deletions consoleme/lib/plugins.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,8 +27,8 @@ def get_plugin_by_name(plugin_name: str) -> Any:
if plugin_name == "default_config":
initial_exception_message = (
f"Could not find the specified plugin: {plugin_name}. "
"Please install it with `pip install -e default_plugins` "
"from the ConsoleMe directory. "
"Please install it with `pip install -e consoleme/default_plugins` "
"from the ConsoleMe base directory. "
)

exception_message = (
Expand Down
19 changes: 0 additions & 19 deletions consoleme/lib/redis.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
import os
import sys
import threading
import time
Expand All @@ -13,14 +12,6 @@
from consoleme.config import config
from consoleme.lib.plugins import get_plugin_by_name

if config.get("redis.use_redislite"):
import tempfile

import redislite

if not config.get("redis.redis_lite.db_path"):
default_redislite_db_path = tempfile.NamedTemporaryFile().name

region = config.region
log = config.get_logger()
stats = get_plugin_by_name(config.get("plugins.metrics", "default_metrics"))()
Expand Down Expand Up @@ -371,11 +362,6 @@ def __init__(
self.enabled = False

async def redis(self, db: int = 0) -> Redis:
if config.get("redis.use_redislite"):
REDIS_DB_PATH = os.path.join(
config.get("redis.redislite.db_path", default_redislite_db_path)
)
return redislite.StrictRedis(REDIS_DB_PATH, decode_responses=True)
self.red = await sync_to_async(ConsoleMeRedis)(
host=self.host,
port=self.port,
Expand All @@ -386,11 +372,6 @@ async def redis(self, db: int = 0) -> Redis:
return self.red

def redis_sync(self, db: int = 0) -> Redis:
if config.get("redis.use_redislite"):
REDIS_DB_PATH = os.path.join(
config.get("redis.redislite.db_path", default_redislite_db_path)
)
return redislite.StrictRedis(REDIS_DB_PATH, decode_responses=True)
self.red = ConsoleMeRedis(
host=self.host,
port=self.port,
Expand Down
6 changes: 6 additions & 0 deletions example_config/example_config_test.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,12 @@ celery:
enabled: true
cache_cloudtrail_denies:
enabled: true
broker:
global: filesystem://.pytest_cache/0
broker_transport_options:
data_folder_in: .pytest_cache/celery_data_folder_in
data_folder_out: .pytest_cache/celery_data_folder_out
data_folder_processed: .pytest_cache/celery_data_folder_processed
event_bridge:
detect_role_changes_and_update_cache:
queue_arn: arn:aws:sqs:{region}:123456789012:consoleme-cloudtrail-role-events-test
Expand Down
43 changes: 22 additions & 21 deletions requirements-docs.txt
Original file line number Diff line number Diff line change
@@ -1,55 +1,56 @@
#
# This file is autogenerated by pip-compile
# This file is autogenerated by pip-compile with python 3.10
# To update, run:
#
# pip-compile --no-emit-index-url --output-file=requirements-docs.txt requirements-docs.in
#
click==8.0.3
click==8.1.3
# via
# -c requirements-test.txt
# -c requirements.txt
# mkdocs
ghp-import==2.0.1
ghp-import==2.1.0
# via mkdocs
importlib-metadata==4.8.1
importlib-metadata==4.12.0
# via mkdocs
jinja2==3.0.1
jinja2==3.1.2
# via
# -c requirements-test.txt
# -c requirements.txt
# mkdocs
markdown==3.3.4
# mkdocs-material
markdown==3.3.7
# via
# mkdocs
# mkdocs-material
# pymdown-extensions
markupsafe==2.0.1
markupsafe==2.1.1
# via
# -c requirements-test.txt
# -c requirements.txt
# jinja2
mergedeep==1.3.4
# via mkdocs
mkdocs-material-extensions==1.0.3
# via mkdocs-material
mkdocs-material==7.2.6
# via -r requirements-docs.in
mkdocs==1.2.3
mkdocs==1.3.0
# via
# -r requirements-docs.in
# mkdocs-material
packaging==21.0
mkdocs-material==8.3.9
# via -r requirements-docs.in
mkdocs-material-extensions==1.0.3
# via mkdocs-material
packaging==21.3
# via
# -c requirements-test.txt
# -c requirements.txt
# mkdocs
pygments==2.10.0
pygments==2.12.0
# via
# -c requirements-test.txt
# mkdocs-material
pymdown-extensions==8.2
pymdown-extensions==9.5
# via mkdocs-material
pyparsing==2.4.7
pyparsing==3.0.9
# via
# -c requirements-test.txt
# -c requirements.txt
Expand All @@ -59,21 +60,21 @@ python-dateutil==2.8.2
# -c requirements-test.txt
# -c requirements.txt
# ghp-import
pyyaml-env-tag==0.1
# via mkdocs
pyyaml==5.4.1
pyyaml==6.0
# via
# -c requirements-test.txt
# -c requirements.txt
# -r requirements-docs.in
# mkdocs
# pyyaml-env-tag
pyyaml-env-tag==0.1
# via mkdocs
six==1.16.0
# via
# -c requirements-test.txt
# -c requirements.txt
# python-dateutil
watchdog==2.1.5
watchdog==2.1.9
# via mkdocs
zipp==3.5.0
zipp==3.8.1
# via importlib-metadata
4 changes: 2 additions & 2 deletions requirements-test.in
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,11 @@ bandit
black
coverage
docker # Required by moto
fakeredis
flake8
flake8-docstrings
flake8-import-order
mock<4 # Mock 4 breaks a bunch of unit tests
mock
mockredispy
moto
mypy
Expand All @@ -20,7 +21,6 @@ pytest-mock
pytest-timeout
pytest-tornado
pytest-xdist
redislite
readme_renderer
tornado
tox
Loading

0 comments on commit 7481966

Please sign in to comment.