Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: authz schema changes for data connectors #615

Draft
wants to merge 6 commits into
base: build-project-authz-disentangle
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 7 additions & 7 deletions .devcontainer/devcontainer.json
Original file line number Diff line number Diff line change
Expand Up @@ -6,16 +6,16 @@
"workspaceFolder": "/workspace",
"shutdownAction": "stopCompose",
"features": {
"ghcr.io/devcontainers-contrib/features/poetry:2": {},
"ghcr.io/devcontainers-contrib/features/bash-command:1": {
"ghcr.io/devcontainers-extra/features/poetry:2": {},
"ghcr.io/devcontainers-extra/features/bash-command:1": {
"command": "poetry self add poetry-polylith-plugin"
},
"ghcr.io/devcontainers/features/docker-in-docker:2": {},
"ghcr.io/devcontainers-contrib/features/gh-release:1": {
"ghcr.io/devcontainers-extra/features/gh-release:1": {
"repo": "authzed/zed",
"binaryNames": "zed"
},
"ghcr.io/devcontainers-contrib/features/spicedb:1": {},
"ghcr.io/devcontainers-extra/features/spicedb:1": {},
"ghcr.io/devcontainers/features/kubectl-helm-minikube:1": {
"minikube": "none"
},
Expand All @@ -27,12 +27,12 @@
"ghcr.io/EliiseS/devcontainer-features/bash-profile:1": {
"command": "alias k=kubectl"
},
"ghcr.io/devcontainers-contrib/features/rclone:1": {},
"ghcr.io/devcontainers-extra/features/rclone:1": {},
"./k3d": {}
},
"overrideFeatureInstallOrder": [
"ghcr.io/devcontainers-contrib/features/poetry",
"ghcr.io/devcontainers-contrib/features/bash-command"
"ghcr.io/devcontainers-extra/features/poetry",
"ghcr.io/devcontainers-extra/features/bash-command"
],
"postCreateCommand": "poetry install --with dev && mkdir -p /home/vscode/.config/k9s",
"customizations": {
Expand Down
75 changes: 75 additions & 0 deletions components/renku_data_services/authz/schemas.py
Original file line number Diff line number Diff line change
Expand Up @@ -440,3 +440,78 @@ def generate_v4(public_project_ids: Iterable[str]) -> AuthzSchemaMigration:
)

return AuthzSchemaMigration(up=up, down=down)


# Placeholder for the v5 authorization schema (search-related changes arriving
# in a separate PR).
# NOTE(review): writing this placeholder text to SpiceDB would fail schema
# validation — confirm v5 is filled in before its migration is ever run.
_v5: str = "to be added by Eike in a separate PR"

# Migration to schema v5; downgrading restores the v4 schema.
v5 = AuthzSchemaMigration(
    up=[WriteSchemaRequest(schema=_v5)],
    down=[WriteSchemaRequest(schema=_v4)],
)

_v6: str = """\
definition user {}

definition group {
relation group_platform: platform
relation owner: user
relation editor: user
relation viewer: user
relation public_viewer: user:* | anonymous_user:*
permission read = public_viewer + read_children
permission read_children = viewer + write
permission write = editor + delete
permission change_membership = delete
permission delete = owner + group_platform->is_admin
}

definition user_namespace {
relation user_namespace_platform: platform
relation owner: user
relation public_viewer: user:* | anonymous_user:*
permission read = public_viewer + read_children
permission read_children = delete
permission write = delete
permission delete = owner + user_namespace_platform->is_admin
}

definition anonymous_user {}

definition platform {
relation admin: user
permission is_admin = admin
}

definition project {
relation project_platform: platform
relation project_namespace: user_namespace | group
relation owner: user
relation editor: user
relation viewer: user
relation public_viewer: user:* | anonymous_user:*
permission read = public_viewer + read_children
permission read_children = viewer + write + project_namespace->read_children
permission write = editor + delete + project_namespace->write
permission change_membership = delete
permission delete = owner + project_platform->is_admin + project_namespace->delete
}

definition data_connector {
relation data_connector_platform: platform
relation data_connector_namespace: user_namespace | group | project
relation linked_to: project
relation owner: user
relation editor: user
relation viewer: user
relation public_viewer: user:* | anonymous_user:*
permission read = public_viewer + viewer + write + data_connector_namespace->read
permission write = editor + delete + data_connector_namespace->write
permission change_membership = delete
permission delete = owner + data_connector_platform->is_admin + data_connector_namespace->delete
}"""

# Migration to schema v6. Downgrading currently restores v4 because the v5
# schema has not been merged yet (see TODO below).
v6 = AuthzSchemaMigration(
    up=[WriteSchemaRequest(schema=_v6)],
    # TODO: change to v5 when the search changes are merged
    down=[WriteSchemaRequest(schema=_v4)],
)
Original file line number Diff line number Diff line change
@@ -0,0 +1,149 @@
"""bootstrap initial global environments

Mainly used for CI deployments so that they have environments available for testing.

Revision ID: 450ae3930996
Revises: d71f0f795d30
Create Date: 2025-02-07 02:34:53.408066

"""

import logging
from dataclasses import dataclass

import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects.postgresql import JSONB

from renku_data_services.base_models.core import InternalServiceAdmin

JSONVariant = sa.JSON().with_variant(JSONB(), "postgresql")
# revision identifiers, used by Alembic.
revision = "450ae3930996"
down_revision = "d71f0f795d30"
branch_labels = None
depends_on = None


@dataclass
class Environment:
    """Description of a session environment used to seed sessions.environments."""

    name: str
    container_image: str
    # Path the session UI is served under (e.g. "/lab" or "/rstudio").
    default_url: str
    port: int = 8888
    description: str = ""
    working_directory: str | None = None
    mount_directory: str | None = None
    # Numeric user/group id the session container runs as.
    uid: int = 1000
    gid: int = 1000
    # Serialized to JSON when inserted (see JSONVariant bind params below).
    args: list[str] | None = None
    command: list[str] | None = None


# Both global environments launch Jupyter Server with the exact same command
# and arguments; keep them in one place so the two copies cannot drift apart.
_JUPYTER_COMMAND = ["sh", "-c"]
_JUPYTER_ARGS = [
    '/entrypoint.sh jupyter server --ServerApp.ip=0.0.0.0 --ServerApp.port=8888 --ServerApp.base_url=$RENKU_BASE_URL_PATH --ServerApp.token="" --ServerApp.password="" --ServerApp.allow_remote_access=true --ContentsManager.allow_hidden=true --ServerApp.allow_origin=* --ServerApp.root_dir="/home/jovyan/work"'
]

# The global environments inserted by this migration when the table is empty.
# Each Environment gets its own list copies so instances do not alias state.
GLOBAL_ENVIRONMENTS = [
    Environment(
        name="Python/Jupyter",
        description="Standard python environment",
        container_image="renku/renkulab-py:latest",
        default_url="/lab",
        working_directory="/home/jovyan/work",
        mount_directory="/home/jovyan/work",
        port=8888,
        uid=1000,
        gid=100,
        command=list(_JUPYTER_COMMAND),
        args=list(_JUPYTER_ARGS),
    ),
    Environment(
        name="Rstudio",
        description="Standard R environment",
        container_image="renku/renkulab-r:latest",
        default_url="/rstudio",
        working_directory="/home/jovyan/work",
        mount_directory="/home/jovyan/work",
        port=8888,
        uid=1000,
        gid=100,
        command=list(_JUPYTER_COMMAND),
        args=list(_JUPYTER_ARGS),
    ),
]


def upgrade() -> None:
    """Seed the sessions.environments table with the default global environments.

    Seeding is skipped when any environment rows already exist, and any failure
    is logged but deliberately swallowed so the migration chain is not blocked
    (this is a best-effort bootstrap mainly for CI deployments).
    """
    # ### commands auto generated by Alembic - please adjust! ###
    try:
        connection = op.get_bind()

        logging.info("creating global environments")
        # A non-empty environments table means the deployment is not pristine;
        # in that case do not insert anything.
        env_stmt = sa.select(sa.column("id", type_=sa.String)).select_from(sa.table("environments", schema="sessions"))
        existing_envs = connection.execute(env_stmt).all()
        if existing_envs:
            logging.info("skipping environment creation as there already are existing environments")
            return
        for env in GLOBAL_ENVIRONMENTS:
            op.execute(
                sa.text(
                    """INSERT INTO sessions.environments(
                id,
                name, description,
                created_by_id,
                creation_date,
                container_image,
                default_url,
                port,
                working_directory,
                mount_directory,
                uid,
                gid,
                args,
                command,
                environment_kind
                )VALUES (
                generate_ulid(),
                :name,
                :description,
                :created_by_id,
                now(),
                :container_image,
                :default_url,
                :port,
                :working_directory,
                :mount_directory,
                :uid,
                :gid,
                :args,
                :command,
                'GLOBAL'
                )"""  # nosec: B608
                ).bindparams(
                    sa.bindparam("name", value=env.name, type_=sa.Text),
                    sa.bindparam("description", value=env.description, type_=sa.Text),
                    sa.bindparam("created_by_id", value=InternalServiceAdmin.id, type_=sa.Text),
                    sa.bindparam("container_image", value=env.container_image, type_=sa.Text),
                    sa.bindparam("default_url", value=env.default_url, type_=sa.Text),
                    sa.bindparam("port", value=env.port, type_=sa.Integer),
                    sa.bindparam("working_directory", value=env.working_directory, type_=sa.Text),
                    sa.bindparam("mount_directory", value=env.mount_directory, type_=sa.Text),
                    sa.bindparam("uid", value=env.uid, type_=sa.Integer),
                    sa.bindparam("gid", value=env.gid, type_=sa.Integer),
                    sa.bindparam("args", value=env.args, type_=JSONVariant),
                    sa.bindparam("command", value=env.command, type_=JSONVariant),
                )
            )
            # Lazy %-style args: only formatted when INFO logging is enabled.
            logging.info("created global environment %s", env.name)

    except Exception:
        # Best-effort seeding: log the full traceback but do not re-raise, so a
        # failure here cannot block the rest of the migration chain.
        logging.exception("creation of initial global environments failed")

    # ### end Alembic commands ###


def downgrade() -> None:
    """Intentionally a no-op: seeded global environments are left in place."""
    # ### commands auto generated by Alembic - please adjust! ###
    # ### end Alembic commands ###
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
"""Migrate authz schema to v6

Revision ID: 483af0d70cf4
Revises: 450ae3930996

Create Date: 2025-01-22 10:37:40.218992

"""

import logging

from renku_data_services.authz.config import AuthzConfig
from renku_data_services.authz.schemas import v6

# revision identifiers, used by Alembic.
revision = "483af0d70cf4"
down_revision = "450ae3930996"
branch_labels = None
depends_on = None


def upgrade() -> None:
    """Apply the v6 Authz schema to SpiceDB via the configured client."""
    config = AuthzConfig.from_env()
    client = config.authz_client()
    responses = v6.upgrade(client)
    # Lazy %-style args: the message is only formatted if INFO is enabled.
    logging.info(
        "Finished upgrading the Authz schema to version 6 in Alembic revision %s, response: %s", revision, responses
    )


def downgrade() -> None:
    """Revert the Authz schema from v6 to the previous version in SpiceDB."""
    config = AuthzConfig.from_env()
    client = config.authz_client()
    responses = v6.downgrade(client)
    # Lazy %-style args: the message is only formatted if INFO is enabled.
    logging.info(
        "Finished downgrading the Authz schema from version 6 in Alembic revision %s, response: %s", revision, responses
    )
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,6 @@ async def main_container(
container = client.V1Container(
image=config.sessions.git_proxy.image,
security_context={
"fsGroup": 100,
"runAsGroup": 1000,
"runAsUser": 1000,
"allowPrivilegeEscalation": False,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,8 @@ async def git_clone_container_v2(
workspace_mount_path: PurePosixPath,
work_dir: PurePosixPath,
lfs_auto_fetch: bool = False,
uid: int = 1000,
gid: int = 1000,
) -> dict[str, Any] | None:
"""Returns the specification for the container that clones the user's repositories for new operator."""
amalthea_session_work_volume: str = "amalthea-volume"
Expand Down Expand Up @@ -136,9 +138,8 @@ async def git_clone_container_v2(
},
"securityContext": {
"allowPrivilegeEscalation": False,
"fsGroup": 100,
"runAsGroup": 100,
"runAsUser": 1000,
"runAsGroup": gid,
"runAsUser": uid,
"runAsNonRoot": True,
},
"volumeMounts": [
Expand Down Expand Up @@ -261,7 +262,6 @@ async def git_clone_container(server: "UserServer") -> dict[str, Any] | None:
},
"securityContext": {
"allowPrivilegeEscalation": False,
"fsGroup": 100,
"runAsGroup": 100,
"runAsUser": 1000,
"runAsNonRoot": True,
Expand Down
5 changes: 3 additions & 2 deletions components/renku_data_services/notebooks/blueprints.py
Original file line number Diff line number Diff line change
Expand Up @@ -321,7 +321,6 @@ async def _handler(
user=user,
data_connectors_stream=data_connectors_stream,
work_dir=work_dir,
storage_mount=storage_mount,
cloud_storage_overrides=body.cloudstorage or [],
user_repo=self.user_repo,
)
Expand All @@ -333,6 +332,8 @@ async def _handler(
git_providers,
storage_mount,
work_dir,
uid=environment.uid,
gid=environment.gid,
)
extra_containers = await get_extra_containers(self.nb_config, user, repositories, git_providers)
extra_volumes.extend(extra_init_volumes_dc)
Expand All @@ -341,7 +342,7 @@ async def _handler(
base_server_url = self.nb_config.sessions.ingress.base_url(server_name)
base_server_path = self.nb_config.sessions.ingress.base_path(server_name)
ui_path: str = (
f"{base_server_path.rstrip("/")}/{environment.default_url.lstrip("/")}"
f"{base_server_path.rstrip('/')}/{environment.default_url.lstrip('/')}"
if len(environment.default_url) > 0
else base_server_path
)
Expand Down
Loading
Loading