helm testing and linting initial commit
coreydaley committed Nov 23, 2024
1 parent cc98b24 commit 4cc0ac5
Showing 13 changed files with 477 additions and 118 deletions.
51 changes: 51 additions & 0 deletions .github/workflows/lint.yaml
@@ -0,0 +1,51 @@
name: Lint Charts

on:
  pull_request:
    paths:
      - "charts/**"

jobs:
  check-readme:
    runs-on: ubuntu-latest
    env:
      GO111MODULE: on
    steps:
      - name: Checkout
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # pin@v3

      - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # pin@v4
        with:
          python-version: 3.12

      - uses: actions/setup-go@41dfa10bad2bb2ae585af6ee5bb4d7d973ad74ed # pin@v3
        with:
          go-version: ^1

      - name: Setup helm-docs
        run: go install github.com/norwoodj/helm-docs/cmd/helm-docs@latest

      - name: Run pre-commit
        uses: pre-commit/action@2c7b3805fd2a0fd8c1884dcaebf91fc102a13ecd # [email protected]
        with:
          extra_args: helm-docs --all-files

  check-jsonschema-dereference:
    runs-on: ubuntu-latest
    env:
      GO111MODULE: on
    steps:
      - name: Checkout
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # pin@v3

      - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # pin@v4
        with:
          python-version: 3.12

      - uses: actions/setup-go@41dfa10bad2bb2ae585af6ee5bb4d7d973ad74ed # pin@v3
        with:
          go-version: ^1

      - name: Run pre-commit
        uses: pre-commit/action@2c7b3805fd2a0fd8c1884dcaebf91fc102a13ecd
        with:
          extra_args: jsonschema-dereference --all-files
1 change: 1 addition & 0 deletions .gitignore
@@ -1,2 +1,3 @@
private-values.yaml
*~
env
20 changes: 20 additions & 0 deletions .pre-commit-config.yaml
@@ -0,0 +1,20 @@
repos:
  - repo: https://github.com/norwoodj/helm-docs
    rev: v1.2.0
    hooks:
      - id: helm-docs
        args:
          # Make the tool search for charts only under the `charts` directory
          - --chart-search-root=charts
          # The `./` makes it relative to the chart-search-root set above
          - --template-files=./_templates.gotmpl
          # A base filename makes it relative to each chart directory found
          - --template-files=README.md.gotmpl
  - repo: local
    hooks:
      - id: jsonschema-dereference
        name: jsonschema-dereference
        entry: python .pre-commit/jsonschema-dereference.py
        additional_dependencies: [jsonref]
        language: python
        types_or: [yaml, json]
35 changes: 35 additions & 0 deletions .pre-commit/jsonschema-dereference.py
@@ -0,0 +1,35 @@
"""Module providing functionality to defereference a JSON schema."""

import sys
import json
from typing import List, Dict, Any
from pathlib import Path

import jsonref

JSONSCHEMA_TEMPLATE_NAME = "values.schema.tmpl.json"
JSONSCHEMA_NAME = "values.schema.json"

def load_template_schema(chart_dir: Path) -> Dict[str, Any]:
"""Load values.schema.tmpl.json and template it via Jinja2."""
with open(chart_dir / JSONSCHEMA_TEMPLATE_NAME, "r", encoding="utf-8") as f:
return json.loads(f.read())

def save(chart_dir: Path, schema_data: Any):
"""Take schema containing $refs and dereference them."""
with open(chart_dir / JSONSCHEMA_NAME, "w", encoding="utf-8") as f:
json.dump(schema_data, f, indent=4, sort_keys=True)

if __name__ == '__main__':
schemas = [p.parent for p in Path(".").rglob(JSONSCHEMA_NAME)]
errors: List[BaseException] = []
for s in schemas:
try:
schema_template = load_template_schema(s)
schema_data = jsonref.replace_refs(schema_template)
save(s, schema_data)
except BaseException as e:
print(f"Could not process schema for '{s}': {e}")
errors.append(e)
if errors:
sys.exit(1)
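For context, a sketch of what this hook consumes: a hypothetical `charts/<chart>/values.schema.tmpl.json` containing an internal `$ref` (the chart's real schema template is not part of this diff; the file is JSON, shown here in a YAML fence since JSON is a subset of YAML):

```yaml
# Hypothetical values.schema.tmpl.json handed to the hook; the $ref points
# at a shared definition inside the same document.
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "definitions": {
    "port": { "type": "integer", "minimum": 1, "maximum": 65535 }
  },
  "properties": {
    "application": {
      "type": "object",
      "properties": { "appPort": { "$ref": "#/definitions/port" } }
    }
  }
}
```

After `jsonref.replace_refs`, the `$ref` entry is replaced inline by the referenced `port` definition, and the dereferenced document is written alongside the template as `values.schema.json`.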
Empty file added CONTRIBUTING.md
Empty file.
22 changes: 22 additions & 0 deletions Makefile
@@ -0,0 +1,22 @@


##@ Testing & Linting

##@ General

# The help target prints out all targets with their descriptions organized
# beneath their categories. The categories are represented by '##@' and the
# target descriptions by '##'. The awk command is responsible for reading the
# entire set of makefiles included in this invocation, looking for lines of the
# file as xyz: ## something, and then pretty-format the target and help. Then,
# if there's a line with ##@ something, that gets pretty-printed as a category.
# More info on the usage of ANSI control characters for terminal formatting:
# https://en.wikipedia.org/wiki/ANSI_escape_code#SGR_parameters
# More info on the awk command:
# http://linuxcommand.org/lc3_adv_awk.php

.PHONY: help
help: ## Display this help.
	@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m<target>\033[0m\n"} /^[a-zA-Z_0-9-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST)

.PHONY: help
7 changes: 7 additions & 0 deletions charts/_templates.gotmpl
@@ -0,0 +1,7 @@
{{ define "chart.valuesTable" }}
| Key | Type | Default | Description |
|-----|------|---------|-------------|
{{- range .Values }}
| {{ .Key }} | {{ .Type }} | {{ if .Default }}{{ .Default }}{{ else }}{{ .AutoDefault }}{{ end }} | {{ if .Description }}{{ .Description }}{{ else }}{{ .AutoDescription }}{{ end }} |
{{- end }}
{{ end }}
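The `chart.valuesTable` definition above fills each row from helm-docs' `Key`, `Type`, `Default`/`AutoDefault`, and `Description`/`AutoDescription` fields, which helm-docs derives from `# -- ` doc comments placed directly above keys in a chart's `values.yaml`. As a rough, hypothetical illustration (the chart's actual `values.yaml` is not part of this diff; the keys below only mirror the documented chatbot values):

```yaml
# Hypothetical values.yaml fragment: helm-docs turns each "# --" comment into
# the Description column and the literal value into the Default column.
application:
  # -- The image used for the initial chatbot application interface
  appContainer: "quay.io/redhat-ai-dev/chatbot:latest"
  # -- The exposed port of the application
  appPort: 8501
gitops:
  # -- [REQUIRED] The Github Organization name that the chatbot application repository will be created in
  githubOrgName: ""
```

The `helm-docs` pre-commit hook configured above then regenerates each chart's `README.md` from its `README.md.gotmpl` plus these comments.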
37 changes: 33 additions & 4 deletions charts/ai-software-templates/chatbot/Chart.yaml
@@ -1,8 +1,37 @@
# The chart API version (required)
apiVersion: v2
# The name of the chart (required)
name: chatbot-ai-sample
# A SemVer 2 version (required)
version: 0.1.1
# A SemVer range of compatible Kubernetes versions (optional)
kubeVersion: ">= 1.27.0-0"
# A single-sentence description of this project (optional)
description: This Helm Chart deploys a Large Language Model (LLM)-enabled [chat bot application](https://github.com/redhat-ai-dev/ai-lab-samples/tree/main/chatbot).
# The type of the chart (optional)
type: application
# A list of keywords about this project (optional)
keywords:
  - chatbot
  - llama.cpp
  - ai-lab
# The URL of this project's home page (optional)
home: https://github.com/redhat-ai-dev/ai-lab-helm-charts
# A list of URLs to source code for this project (optional)
sources:
  - https://github.com/redhat-ai-dev/ai-lab-template
# A list of the chart requirements (optional)
dependencies: []
# A list of maintainers of this project (optional)
maintainers:
  - name: Red Hat AI Development Team
    url: https://github.com/redhat-ai-dev
# A URL to an SVG or PNG image to be used as an icon (optional)
# icon: ""
# The version of the app that this contains (optional). Needn't be SemVer. Quotes recommended.
# appVersion:
# Whether this chart is deprecated (optional, boolean)
deprecated: false
# A list of annotations keyed by name (optional)
annotations:
  charts.openshift.io/name: Chatbot AI Sample
  description: A Helm chart for the Chatbot AI Sample app. For more information please check https://github.com/redhat-ai-dev/ai-lab-helm-charts.git
  name: chatbot-ai-sample
  tags: chatbot,llama.cpp,ai-lab
  version: 0.1.1
103 changes: 55 additions & 48 deletions charts/ai-software-templates/chatbot/README.md
@@ -1,78 +1,85 @@


# Chatbot AI Sample Helm Chart
![Version: 0.1.1](https://img.shields.io/badge/Version-0.1.1-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square)

This repo is a Helm chart for building and deploying a Large Language Model (LLM)-enabled [chat application](https://github.com/redhat-ai-dev/ai-lab-samples/tree/main/chatbot). For more information about Helm charts, see the official [Helm Charts Documentation](https://helm.sh/).
This Helm Chart deploys a Large Language Model (LLM)-enabled [chat bot application](https://github.com/redhat-ai-dev/ai-lab-samples/tree/main/chatbot).

The deployment flow will create an application instance, a model server, and a GitHub repository with all the application contents in the given GitHub organization. See the [background](#background) section for more information.
## Background

## Requirements
This Helm Chart creates the following components:

- You have a GitHub App created with sufficient permissions for the organization in which the application repository will be created. Detailed instructions for creating the GitHub App can be found [here](https://github.com/redhat-ai-dev/ai-rhdh-installer/blob/main/docs/APP-SETUP.md#github-app).
- You have access to an OpenShift 4 cluster for each operation, such as deploying and testing.
- The Namespace that your application will run in is already created in your cluster.
- Your cluster has the [OpenShift Pipelines Operator](https://www.redhat.com/en/technologies/cloud-computing/openshift/pipelines) installed and connected to your GitHub App's webhook. If your cluster is not configured yet, check the ["Pipelines Configuration Guide"](https://github.com/redhat-ai-dev/ai-lab-helm-charts/blob/main/docs/PIPELINES_CONFIGURATION.md) for further instructions.
- A `key/value` Secret is already created in the Namespace where you plan to install your Helm release, containing a [GitHub Token](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens#creating-a-personal-access-token-classic) with sufficient access to the given GitHub Organization. You can find the exact permissions [here](https://github.com/redhat-ai-dev/ai-rhdh-installer/blob/main/docs/APP-SETUP.md#procedure). Your Secret's name and the key of the GitHub token will be provided as values to the Helm chart.
### The Model Service
Based on the `llama.cpp` inference server and related to the [ai-lab-recipes model server](https://github.com/containers/ai-lab-recipes/tree/main/model_servers/llamacpp_python).

## Background
### The Application
A [Streamlit](https://github.com/streamlit/streamlit) application to interact with the model service which is based on the related [Chatbot Template](https://github.com/redhat-ai-dev/ai-lab-template/tree/main/templates/chatbot/content).

The chatbot helm chart utilizes two main deployments:
### The GitOps Job
A [job](./templates/application-gitops-job.yaml) which takes care of creating the application GitHub repository.

1. The model service deployment, based on `llama.cpp` inference server and related to the [ai-lab-recipes model server](https://github.com/containers/ai-lab-recipes/tree/main/model_servers/llamacpp_python).
2. The application deployment, a Streamlit based application to interact with the model service. The application is based on the related [Chatbot Template](https://github.com/redhat-ai-dev/ai-lab-template/tree/main/templates/chatbot/content).
### The Tekton Repository
A [repository](./templates/tekton-repository.yaml) which connects our application with the `pipeline-as-code-controller`, allowing us to manage all webhooks received from our GitHub Application.

Apart from the two main deployments, the gitops & OpenShift Pipelines parts are handled by the following:
## Prerequisites

1. The [application-gitops-job](./templates/application-gitops-job.yaml) which takes care of the application GitHub repository creation.
2. The [tekton Repository](./templates/tekton-repository.yaml) which connects our application with the `pipeline-as-code-controller`, enabling us to manage all webhooks received from our GitHub App.
- A [Github Application](https://github.com/redhat-ai-dev/ai-rhdh-installer/blob/main/docs/APP-SETUP.md#github-app) with `create repository` permissions for the GitHub Organization where the application will be created.
- Access to an OpenShift 4.x cluster with
- permissions to deploy an application
- an existing Namespace where your application will be deployed
- correctly [installed](https://www.redhat.com/en/technologies/cloud-computing/openshift/pipelines) and [configured](https://github.com/redhat-ai-dev/ai-lab-helm-charts/blob/main/docs/PIPELINES_CONFIGURATION.md) OpenShift Pipelines Operator which is connected to your GitHub Application's webhook
- a Secret (of `key/value` type) in the existing Namespace containing a [GitHub Token](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens#creating-a-personal-access-token-classic) with [these permissions](https://github.com/redhat-ai-dev/ai-rhdh-installer/blob/main/docs/APP-SETUP.md#procedure) to the given GitHub Organization

## Installation

The helm chart can be directly installed from the OpenShift Dev Console. Check [here](https://docs.redhat.com/en/documentation/openshift_container_platform/4.8/html/building_applications/working-with-helm-charts#understanding-helm) for more information.
### Using the OpenShift Developer Catalog

This Helm Chart can be installed from the [Developer Catalog](https://docs.openshift.com/container-platform/4.17/applications/creating_applications/odc-creating-applications-using-developer-perspective.html#odc-using-the-developer-catalog-to-add-services-or-components_odc-creating-applications-using-developer-perspective) using the [OpenShift Developer Console](https://docs.openshift.com/container-platform/4.17/web_console/web-console-overview.html#about-developer-perspective_web-console-overview).

### Install using Helm
### Using the Helm Install Command

To install the Chatbot AI Sample Helm chart using Helm directly, you can run:
This Helm Chart can be installed via the command line by running the following command:

```
helm upgrade --install <release-name> --namespace <release-namespace> .
```

The `.gitignore` file in this repository filters files named `private-values.yaml`. Thus, you can maintain a values settings file in your local fork of this repository, outside of git management. Copy `values.yaml` in this directory to `private-values.yaml` and make any necessary edits to it. Then change your helm invocation to the following:
**NOTE:**
You can create a `private-values.yaml` file that will be ignored by git to pass values to your Helm Chart.
Just copy the existing `values.yaml` file in this directory to `private-values.yaml` and make any necessary edits, then update your installation command as shown below:

```shell
helm upgrade --install <release-name> --namespace <release-namespace> -f ./private-values.yaml .
```

## Values

Below is a table of each value used to configure this chart. Note:

- Your helm release's name will be used as the name of the application GitHub repository.
## Maintainers

### Application
| Name | Email | Url |
| ---- | ------ | --- |
| Red Hat AI Development Team | | <https://github.com/redhat-ai-dev> |
## Source Code

| Value | Description | Default | Additional Information |
| -------------------------- | ------------------------------------------------------------- | -------------------------------------- | ---------------------- |
| `application.appPort` | The exposed port of the application | 8501 | |
| `application.appContainer` | The initial image used for the chatbot application interface. | `quay.io/redhat-ai-dev/chatbot:latest` | |

### Model
* <https://github.com/redhat-ai-dev/ai-lab-template>
## Requirements

| Value | Description | Default | Additional Information |
| ----------------------------- | --------------------------------------------------------------------- | ------------------------------------------------------- | ---------------------- |
| `model.modelServicePort` | The exposed port of the model service. | 8001 | |
| `model.modelServiceContainer` | The image used for the model service. | `quay.io/ai-lab/llamacpp_python:latest` | |
| `initContainer` | The image used for the initContainer of the model service deployment. | `quay.io/redhat-ai-dev/granite-7b-lab:latest` | |
| `model.modelInitCommand` | The model service initContainer command. | `['/usr/bin/install', '/model/model.file', '/shared/']` | |
| `model.modelPath` | The path of the model file inside the model service container. | `/model/model.file` | |
Kubernetes: `>= 1.27.0-0`

### Gitops
## Values

| Value | Description | Default | Additional Information |
| -------------------------- |-----------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------------| ---------------------- |
| `gitops.gitSecretName` | The name of the Secret containing the required Github token. | `git-secrets` | |
| `gitops.gitSecretKeyToken` | The name of the Secret's key with the Github token value. | `GITHUB_TOKEN` | |
| `gitops.githubOrgName` | `[REQUIRED]` The Github Organization name that the chatbot application repository will be created in. | | |
| `gitops.gitSourceRepo` | The Github Repository with the contents of the ai-lab sample chatbot application. It must be either `redhat-ai-dev/ai-lab-samples` or a fork of it. | `redhat-ai-dev/ai-lab-samples` | |
| `gitops.gitDefaultBranch` | The default branch for the chatbot application Github repository. | `main` | |
| `gitops.quayAccountName` | `[REQUIRED]` The quay.io account that the application image will be pushed to. | | |
| Key | Type | Default | Description |
|-----|------|---------|-------------|
| application.appContainer | string | `"quay.io/redhat-ai-dev/chatbot:latest"` | The image used for the initial chatbot application interface |
| application.appPort | int | `8501` | The exposed port of the application |
| gitops.gitDefaultBranch | string | `"main"` | The default branch for the chatbot application Github repository |
| gitops.gitSecretKeyToken | string | `"GITHUB_TOKEN"` | The name of the Secret's key with the Github token value |
| gitops.gitSecretName | string | `"github-secrets"` | The name of the Secret containing the required Github token |
| gitops.gitSourceRepo | string | `"redhat-ai-dev/ai-lab-samples"` | The Github Repository with the contents of the ai-lab sample chatbot application |
| gitops.githubOrgName | string | `""` | [REQUIRED] The Github Organization name that the chatbot application repository will be created in |
| gitops.quayAccountName | string | `""` | [REQUIRED] The quay.io account that the application image will be pushed to |
| model.initContainer | string | `"quay.io/redhat-ai-dev/granite-7b-lab:latest"` | The image used for the initContainer of the model service deployment |
| model.modelInitCommand | string | `"['/usr/bin/install', '/model/model.file', '/shared/']"` | The model service initContainer command |
| model.modelPath | string | `"/model/model.file"` | The path of the model file inside the model service container |
| model.modelServiceContainer | string | `"quay.io/ai-lab/llamacpp_python:latest"` | The image used for the model service |
| model.modelServicePort | int | `8001` | The exposed port of the model service |

**NOTE:** Your helm release's name will be used as the name of the application GitHub repository.
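For example, a minimal, hypothetical `private-values.yaml` might set only the two `[REQUIRED]` values and the Secret name, leaving everything else at its defaults (the organization and account names below are placeholders):

```yaml
# Hypothetical private-values.yaml, kept out of git via the .gitignore entry above.
gitops:
  # [REQUIRED] The Github Organization the application repository will be created in
  githubOrgName: "my-github-org"
  # [REQUIRED] The quay.io account the application image will be pushed to
  quayAccountName: "my-quay-account"
  # The name of the pre-created Secret holding the Github token
  gitSecretName: "github-secrets"
```

It is then passed to the install with `-f ./private-values.yaml` as shown in the command above.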