diff --git a/.github/workflows/lint.yaml b/.github/workflows/lint.yaml new file mode 100644 index 0000000..b5e06a6 --- /dev/null +++ b/.github/workflows/lint.yaml @@ -0,0 +1,52 @@ +name: Lint Charts + +on: + pull_request: + paths: + - "charts/**" + +jobs: + check-readme: + runs-on: ubuntu-latest + env: + GO111MODULE: on + steps: + - name: Checkout + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # pin@v3 + + - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # pin@v4 + with: + python-version: 3.12 + + - uses: actions/setup-go@41dfa10bad2bb2ae585af6ee5bb4d7d973ad74ed # pin@v3 + with: + go-version: ^1 + + - name: Setup helm-docs + run: go install github.com/norwoodj/helm-docs/cmd/helm-docs@latest + + - name: Run pre-commit + uses: pre-commit/action@2c7b3805fd2a0fd8c1884dcaebf91fc102a13ecd # pin@v3.0.1 + with: + extra_args: helm-docs --all-files + + check-jsonschema-dereference: + runs-on: ubuntu-latest + env: + GO111MODULE: on + steps: + - name: Checkout + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # pin@v3 + + - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # pin@v4 + with: + python-version: 3.12 + + - uses: actions/setup-go@41dfa10bad2bb2ae585af6ee5bb4d7d973ad74ed # pin@v3 + with: + go-version: ^1 + + - name: Run pre-commit + uses: pre-commit/action@2c7b3805fd2a0fd8c1884dcaebf91fc102a13ecd + with: + extra_args: jsonschema-dereference --all-files diff --git a/.gitignore b/.gitignore index 4083855..8997636 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,3 @@ private-values.yaml *~ +env diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..3f9659a --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,20 @@ +repos: + - repo: https://github.com/norwoodj/helm-docs + rev: v1.2.0 + hooks: + - id: helm-docs + args: + # Make the tool search for charts only under the ``charts` directory + - --chart-search-root=charts + # The `./` makes it relative 
to the chart-search-root set above + - --template-files=./_templates.gotmpl + # A base filename makes it relative to each chart directory found + - --template-files=README.md.gotmpl + - repo: local + hooks: + - id: jsonschema-dereference + name: jsonschema-dereference + entry: python .pre-commit/jsonschema_dereference.py + additional_dependencies: [jsonref] + language: python + types_or: [yaml, json] diff --git a/.pre-commit/jsonschema_dereference.py b/.pre-commit/jsonschema_dereference.py new file mode 100644 index 0000000..48aeec5 --- /dev/null +++ b/.pre-commit/jsonschema_dereference.py @@ -0,0 +1,54 @@ +""" +This Python module: + - Searches for JSON Schema templates with the name values.schema.tmpl.json + - Dereferences any $refs contained in those files + - Outputs the new Schema to a values.schema.json file in the same directory +""" + +import sys +import json +from typing import List, Dict, Any +from pathlib import Path + +# External library dependency +# Install with 'pip install jsonref' +import jsonref + +# File to write the dereferenced JSON Schema to +JSONSCHEMA_NAME = "values.schema.json" +# File that contains the JSON Schema that needs dereferencing +JSONSCHEMA_TEMPLATE_NAME = "values.schema.tmpl.json" + +def load_template_schema(schema_dir: Path) -> Dict[str, Any]: + """Load the schema template values.schema.tmpl.json""" + with open(schema_dir / JSONSCHEMA_TEMPLATE_NAME, "r", encoding="utf-8") as f: + return json.loads(f.read()) + +def save(schema_dir: Path, schema_data: Any) -> None: + """Save the dereferenced schema to values.schema.json""" + with open(schema_dir / JSONSCHEMA_NAME, "w", encoding="utf-8") as f: + json.dump(schema_data, f, indent=4, sort_keys=True) + +if __name__ == '__main__': + # Search for all values.schema.tmpl.json files + schema_templates = [p.parent for p in Path(".").rglob(JSONSCHEMA_TEMPLATE_NAME)] + + # Create a list to hold any exceptions + errors: List[BaseException] = [] + # Iterate over the List of found schema 
templates + for schema_template in schema_templates: + try: + # Load the schema into a variable as JSON + st = load_template_schema(schema_template) + # Dereference all of the $refs + s = jsonref.replace_refs(st) + # Save the dereferenced JSON + save(schema_template, s) + except BaseException as e: + # Print any errors to the screen + print(f"Could not process schema for '{schema_template}': {e}") + # Append any exceptions to the errors List + errors.append(e) + if errors: + # Exit with status 1 if any exceptions were thrown + sys.exit(1) diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..52a9029 --- /dev/null +++ b/Makefile @@ -0,0 +1,36 @@ +##@ Generation + +.PHONY: generate +generate: ## Run all generation commands + pre-commit run --all-files + +.PHONY: helm-docs +helm-docs: ## Generate README.md from the README.md.gotmpl file + pre-commit run helm-docs --all-files + +.PHONY: jsonschema-dereference +jsonschema-dereference: ## Generate values.schema.json from the values.schema.tmpl.json file + pre-commit run jsonschema-dereference --all-files + +##@ Scripts + +.PHONY: install-pre-commit +install-pre-commit: ## Install the pre-commit script + pre-commit install + +##@ General + +# The help target prints out all targets with their descriptions organized +# beneath their categories. The categories are represented by '##@' and the +# target descriptions by '##'. The awk command is responsible for reading the +# entire set of makefiles included in this invocation, looking for lines of the +# file as xyz: ## something, and then pretty-format the target and help. Then, +# if there's a line with ##@ something, that gets pretty-printed as a category. +# More info on the usage of ANSI control characters for terminal formatting: +# https://en.wikipedia.org/wiki/ANSI_escape_code#SGR_parameters +# More info on the awk command: +# http://linuxcommand.org/lc3_adv_awk.php + +.PHONY: help +help: ## Display this help. 
+ @awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m\033[0m\n"} /^[a-zA-Z_0-9-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST) diff --git a/charts/_templates.gotmpl b/charts/_templates.gotmpl new file mode 100644 index 0000000..136a5f4 --- /dev/null +++ b/charts/_templates.gotmpl @@ -0,0 +1,7 @@ +{{ define "chart.valuesTable" }} +| Key | Type | Default | Description | +|-----|------|---------|-------------| +{{- range .Values }} +| {{ .Key }} | {{ .Type }} | {{ if .Default }}{{ .Default }}{{ else }}{{ .AutoDefault }}{{ end }} | {{ if .Description }}{{ .Description }}{{ else }}{{ .AutoDescription }}{{ end }} | +{{- end }} +{{ end }} diff --git a/charts/ai-software-templates/chatbot/Chart.yaml b/charts/ai-software-templates/chatbot/Chart.yaml index b7b0274..e42cc67 100644 --- a/charts/ai-software-templates/chatbot/Chart.yaml +++ b/charts/ai-software-templates/chatbot/Chart.yaml @@ -1,8 +1,37 @@ +# The chart API version (required) apiVersion: v2 +# The name of the chart (required) +name: chatbot-ai-sample +# A SemVer 2 version (required) +version: 0.1.1 +# A SemVer range of compatible Kubernetes versions (optional) kubeVersion: ">= 1.27.0-0" +# A single-sentence description of this project (optional) +description: This Helm Chart deploys a Large Language Model (LLM)-enabled [chat bot application](https://github.com/redhat-ai-dev/ai-lab-samples/tree/main/chatbot). 
+# The type of the chart (optional) +type: application +# A list of keywords about this project (optional) +keywords: + - chatbot + - llama.cpp + - ai-lab +# The URL of this project's home page (optional) +home: https://github.com/redhat-ai-dev/ai-lab-helm-charts +# A list of URLs to source code for this project (optional) +sources: + - https://github.com/redhat-ai-dev/ai-lab-template +# A list of the chart requirements (optional) +dependencies: [] +# A list of maintainers of this project (optional) +maintainers: + - name: Red Hat AI Development Team + url: https://github.com/redhat-ai-dev +# A URL to an SVG or PNG image to be used as an icon (optional) +# icon: "" +# The version of the app that this contains (optional). Needn't be SemVer. Quotes recommended. +# appVersion: +# Whether this chart is deprecated (optional, boolean) +deprecated: false +# A list of annotations keyed by name (optional) annotations: charts.openshift.io/name: Chatbot AI Sample -description: A Helm chart for the Chatbot AI Sample app. For more information please check https://github.com/redhat-ai-dev/ai-lab-helm-charts.git -name: chatbot-ai-sample -tags: chatbot,llama.cpp,ai-lab -version: 0.1.1 diff --git a/charts/ai-software-templates/chatbot/README.md b/charts/ai-software-templates/chatbot/README.md index 50fb87b..e7fc6d4 100644 --- a/charts/ai-software-templates/chatbot/README.md +++ b/charts/ai-software-templates/chatbot/README.md @@ -1,78 +1,85 @@ + + # Chatbot AI Sample Helm Chart +![Version: 0.1.1](https://img.shields.io/badge/Version-0.1.1-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square) -This repo is a Helm chart for building and deploying a Large language model (LLM)-enabled [chat application](https://github.com/redhat-ai-dev/ai-lab-samples/tree/main/chatbot). For more information about helm charts see the official [Helm Charts Documentation](https://helm.sh/). 
+This Helm Chart deploys a Large Language Model (LLM)-enabled [chat bot application](https://github.com/redhat-ai-dev/ai-lab-samples/tree/main/chatbot). -The deployment flow, will create an application instance, a model server and a github repository with all the application contents in the given github organization. See the [background](#background) section for more information. +## Background -## Requirements +This Helm Chart creates the following components: -- You have a Github APP created with sufficient permissions for the organization that the application repository will be created. Detailed instructions for the github application creation can be found [here](https://github.com/redhat-ai-dev/ai-rhdh-installer/blob/main/docs/APP-SETUP.md#github-app). -- You need to have access to a cluster for each operation with OpenShift 4, like deploying and testing. -- The Namespace that your application will run is already created in your cluster. -- Your cluster should have [Openshift Pipelines Operator](https://www.redhat.com/en/technologies/cloud-computing/openshift/pipelines) installed and should be connected to your Github App's webhook. In case your cluster is not configured yet, check out our [installation](../pipeline-install/README.md) and [setup](../pipeline-setup/README.md) helm charts doing this. -- A `key/value` Secret is already created in the Namespace that you are planning to install your helm release, containing a [Github Token](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens#creating-a-personal-access-token-classic) with sufficient access to the given Github Organization. You can find the exact permissions [here](https://github.com/redhat-ai-dev/ai-rhdh-installer/blob/main/docs/APP-SETUP.md#procedure). Your Secret's name and the Key of the github token will be provided as values to the helm chart. 
+### The Model Service +Based on the `llama.cpp` inference server and related to the [ai-lab-recipes model server](https://github.com/containers/ai-lab-recipes/tree/main/model_servers/llamacpp_python). -## Background +### The Application +A [Streamlit](https://github.com/streamlit/streamlit) application to interact with the model service which is based on the related [Chatbot Template](https://github.com/redhat-ai-dev/ai-lab-template/tree/main/templates/chatbot/content). -The chatbot helm chart utilizes two main deployments: +### The GitOps Job +A [job](./templates/application-gitops-job.yaml) which takes care of creating the application github repository. -1. The model service deployment, based on `llama.cpp` inference server and related to the [ai-lab-recipes model server](https://github.com/containers/ai-lab-recipes/tree/main/model_servers/llamacpp_python). -2. The application deployment, a Streamlit based application to interact with the model service. The application is based on the related [Chatbot Template](https://github.com/redhat-ai-dev/ai-lab-template/tree/main/templates/chatbot/content). +### The Tekton Repository +A [repository](./templates/tekton-repository.yaml) which connects our application with the `pipeline-as-code-controller` which allows us to manage all webhooks received from our GitHub Application. -Apart from the two main deployments, the gitops & OpenShift Pipelines parts are handled by the following: +## Prerequisites -1. The [application-gitops-job](./templates/application-gitops-job.yaml) which takes care of the application github repository creation. -2. The [tekton Repository](./templates/tekton-repository.yaml) which connects our application with the `pipeline-as-code-controller` and we are able to manage all webhooks received from our Github App. 
+- A [Github Application](https://github.com/redhat-ai-dev/ai-rhdh-installer/blob/main/docs/APP-SETUP.md#github-app) with `create repository` permissions for the GitHub Organization where the application will be created. +- Access to an OpenShift 4.x cluster with + - permissions to deploy an application + - an existing Namespace where your application will be deployed + - correctly [installed](https://www.redhat.com/en/technologies/cloud-computing/openshift/pipelines) and [configured](https://github.com/redhat-ai-dev/ai-lab-helm-charts/blob/main/docs/PIPELINES_CONFIGURATION.md) OpenShift Pipelines Operator which is connected to your GitHub Application's webhook + - a Secret (of `key/value` type) in the existing Namespace containing a [Github Token](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens#creating-a-personal-access-token-classic) with [these permissions](https://github.com/redhat-ai-dev/ai-rhdh-installer/blob/main/docs/APP-SETUP.md#procedure) to the given Github Organization ## Installation -The helm chart can be directly installed from the OpenShift Dev Console. Check [here](https://docs.redhat.com/en/documentation/openshift_container_platform/4.8/html/building_applications/working-with-helm-charts#understanding-helm) for more information. +### Using the OpenShift Developer Catalog + +This Helm Chart can be installed from the [Developer Catalog](https://docs.openshift.com/container-platform/4.17/applications/creating_applications/odc-creating-applications-using-developer-perspective.html#odc-using-the-developer-catalog-to-add-services-or-components_odc-creating-applications-using-developer-perspective) using the [OpenShift Developer Console](https://docs.openshift.com/container-platform/4.17/web_console/web-console-overview.html#about-developer-perspective_web-console-overview). 
-### Install using Helm +### Using the Helm Install Command -To install the Chatbot AI Sample Helm chart using Helm directly, you can run: +This Helm Chart can be installed via the command line by running the following command: ``` -helm upgrade --install --namespace . +helm upgrade --install --namespace . ``` -The `.gitignore` file in this repository filters files named `private-values.yaml`. Thus, you can maintain in -your local fork of this repository a value settings file outside of git management. Copy `values.yaml` in this directory to `private-values.yaml` and make any necessary edits to `private-values.yaml`. Then change your helm invocation to the following: +**NOTE:** +You can create a `private-values.yaml` file that will be ignored by git to pass values to your Helm Chart. +Just copy the existing `values.yaml` file in this directory to `private-values.yaml` and make any necessary edits, then update your installation command as shown below: ```shell -helm upgrade --install --namespace -f ./private-values.yaml . +helm upgrade --install --namespace -f ./private-values.yaml . ``` -## Values - -Below is a table of each value used to configure this chart. Note: - -- Your helm release's name will be used to as the name of the application github repository. +## Maintainers -### Application +| Name | Email | Url | +| ---- | ------ | --- | +| Red Hat AI Development Team | | | +## Source Code -| Value | Description | Default | Additional Information | -| -------------------------- | ------------------------------------------------------------- | -------------------------------------- | ---------------------- | -| `application.appPort` | The exposed port of the application | 8501 | | -| `application.appContainer` | The initial image used for the chatbot application interface. 
| `quay.io/redhat-ai-dev/chatbot:latest` | | - -### Model +* +## Requirements -| Value | Description | Default | Additional Information | -| ----------------------------- | --------------------------------------------------------------------- | ------------------------------------------------------- | ---------------------- | -| `model.modelServicePort` | The exposed port of the model service. | 8001 | | -| `model.modelServiceContainer` | The image used for the model service. | `quay.io/ai-lab/llamacpp_python:latest` | | -| `initContainer` | The image used for the initContainer of the model service deployment. | `quay.io/redhat-ai-dev/granite-7b-lab:latest` | | -| `model.modelInitCommand` | The model service initContainer command. | `['/usr/bin/install', '/model/model.file', '/shared/']` | | -| `model.modelPath` | The path of the model file inside the model service container. | `/model/model.file` | | +Kubernetes: `>= 1.27.0-0` -### Gitops +## Values -| Value | Description | Default | Additional Information | -| -------------------------- |-----------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------------| ---------------------- | -| `gitops.gitSecretName` | The name of the Secret containing the required Github token. | `git-secrets` | | -| `gitops.gitSecretKeyToken` | The name of the Secret's key with the Github token value. | `password` | | -| `gitops.githubOrgName` | `[REQUIRED]` The Github Organization name that the chatbot application repository will be created into. | | | -| `gitops.gitSourceRepo` | The Github Repository with the contents of the ai-lab sample chatbot application. It must be either the `redhat-ai-dev/ai-lab-samples` or its fork. | `redhat-ai-dev/ai-lab-samples` | -| `gitops.gitDefaultBranch` | The default branch for the chatbot application Github repository. 
| `main` | | -| `gitops.quayAccountName` | `[REQUIRED]` The quay.io account that the application image will be pushed. | | | +| Key | Type | Default | Description | +|-----|------|---------|-------------| +| application.appContainer | string | `"quay.io/redhat-ai-dev/chatbot:latest"` | The image used for the initial chatbot application interface | +| application.appPort | int | `8501` | The exposed port of the application | +| gitops.gitDefaultBranch | string | `"main"` | The default branch for the chatbot application Github repository | +| gitops.gitSecretKeyToken | string | `"GITHUB_TOKEN"` | The name of the Secret's key with the Github token value | +| gitops.gitSecretName | string | `"github-secrets"` | The name of the Secret containing the required Github token | +| gitops.gitSourceRepo | string | `"redhat-ai-dev/ai-lab-samples"` | The Github Repository with the contents of the ai-lab sample chatbot application | +| gitops.githubOrgName | string | `""` | [REQUIRED] The Github Organization name that the chatbot application repository will be created in | +| gitops.quayAccountName | string | `""` | [REQUIRED] The quay.io account that the application image will be pushed | +| model.initContainer | string | `"quay.io/redhat-ai-dev/granite-7b-lab:latest"` | The image used for the initContainer of the model service deployment | +| model.modelInitCommand | string | `"['/usr/bin/install', '/model/model.file', '/shared/']"` | The model service initContainer command | +| model.modelPath | string | `"/model/model.file"` | The path of the model file inside the model service container | +| model.modelServiceContainer | string | `"quay.io/ai-lab/llamacpp_python:latest"` | The image used for the model service | +| model.modelServicePort | int | `8001` | The exposed port of the model service | + +**NOTE:** Your helm release's name will be used as the name of the application github repository diff --git a/charts/ai-software-templates/chatbot/README.md.gotmpl 
b/charts/ai-software-templates/chatbot/README.md.gotmpl new file mode 100644 index 0000000..295e8ab --- /dev/null +++ b/charts/ai-software-templates/chatbot/README.md.gotmpl @@ -0,0 +1,59 @@ +{{ template "chart.deprecationWarning" . }} +# Chatbot AI Sample Helm Chart +{{ template "chart.badgesSection" . }} + +{{ template "chart.description" . }} + +## Background + +This Helm Chart creates the following components: + +### The Model Service +Based on the `llama.cpp` inference server and related to the [ai-lab-recipes model server](https://github.com/containers/ai-lab-recipes/tree/main/model_servers/llamacpp_python). + +### The Application +A [Streamlit](https://github.com/streamlit/streamlit) application to interact with the model service which is based on the related [Chatbot Template](https://github.com/redhat-ai-dev/ai-lab-template/tree/main/templates/chatbot/content). + +### The GitOps Job +A [job](./templates/application-gitops-job.yaml) which takes care of creating the application github repository. + +### The Tekton Repository +A [repository](./templates/tekton-repository.yaml) which connects our application with the `pipeline-as-code-controller` which allows us to manage all webhooks received from our GitHub Application. + +## Prerequisites + +- A [Github Application](https://github.com/redhat-ai-dev/ai-rhdh-installer/blob/main/docs/APP-SETUP.md#github-app) with `create repository` permissions for the GitHub Organization where the application will be created. 
+- Access to an OpenShift 4.x cluster with + - permissions to deploy an application + - an existing Namespace where your application will be deployed + - correctly [installed](https://www.redhat.com/en/technologies/cloud-computing/openshift/pipelines) and [configured](https://github.com/redhat-ai-dev/ai-lab-helm-charts/blob/main/docs/PIPELINES_CONFIGURATION.md) OpenShift Pipelines Operator which is connected to your GitHub Application's webhook + - a Secret (of `key/value` type) in the existing Namespace containing a [Github Token](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens#creating-a-personal-access-token-classic) with [these permissions](https://github.com/redhat-ai-dev/ai-rhdh-installer/blob/main/docs/APP-SETUP.md#procedure) to the given Github Organization + +## Installation + +### Using the OpenShift Developer Catalog + +This Helm Chart can be installed from the [Developer Catalog](https://docs.openshift.com/container-platform/4.17/applications/creating_applications/odc-creating-applications-using-developer-perspective.html#odc-using-the-developer-catalog-to-add-services-or-components_odc-creating-applications-using-developer-perspective) using the [OpenShift Developer Console](https://docs.openshift.com/container-platform/4.17/web_console/web-console-overview.html#about-developer-perspective_web-console-overview). + +### Using the Helm Install Command + +This Helm Chart can be installed via the command line by running the following command: + +``` +helm upgrade --install --namespace . +``` + +**NOTE:** +You can create a `private-values.yaml` file that will be ignored by git to pass values to your Helm Chart. +Just copy the existing `values.yaml` file in this directory to `private-values.yaml` and make any necessary edits, then update your installation command as shown below: + +```shell +helm upgrade --install --namespace -f ./private-values.yaml . 
+``` + +{{ template "chart.maintainersSection" . }} +{{ template "chart.sourcesSection" . }} +{{ template "chart.requirementsSection" . }} +{{ template "chart.valuesSection" . }} + +**NOTE:** Your helm release's name will be used as the name of the application github repository diff --git a/charts/ai-software-templates/chatbot/values.schema.json b/charts/ai-software-templates/chatbot/values.schema.json index 0537044..8618687 100644 --- a/charts/ai-software-templates/chatbot/values.schema.json +++ b/charts/ai-software-templates/chatbot/values.schema.json @@ -1,85 +1,102 @@ { - "$schema": "http://json-schema.org/schema#", - "type": "object", + "$schema": "https://json-schema.org/draft/2020-12/schema", "properties": { "application": { - "type": "object", "properties": { - "appPort": { - "type": "integer", - "description": "The exposed port of the application, if blank it defaults to 8501.", - "default": 8501 - }, "appContainer": { - "type": "string", - "description": "The image used for the initial chatbot application interface, if blank it defaults to 'quay.io/redhat-ai-dev/chatbot:latest'.", - "default": "quay.io/redhat-ai-dev/chatbot:latest" + "default": "quay.io/redhat-ai-dev/chatbot:latest", + "description": "The image used for the initial chatbot application interface.", + "title": "App Container", + "type": "string" + }, + "appPort": { + "default": 8501, + "description": "The exposed port of the application.", + "title": "App Port", + "type": "integer" } - } + }, + "type": "object" }, - "model": { - "type": "object", + "gitops": { "properties": { - "modelServicePort": { - "type": "integer", - "description": "The exposed port of the model service, if blank it defaults to 8001.", - "default": 8001 + "gitDefaultBranch": { + "default": "main", + "description": "The default branch for the chatbot application Github repository.", + "title": "Git Default Branch", + "type": "string" }, - "modelServiceContainer": { - "type": "string", - "description": "The image used 
for the model service, if blank it defaults to 'quay.io/ai-lab/llamacpp_python:latest'.", - "default": "quay.io/ai-lab/llamacpp_python:latest" + "gitSecretKeyToken": { + "default": "GITHUB_TOKEN", + "description": "The name of the Secret's key with the Github token value.", + "title": "Secret Key Token", + "type": "string" }, - "initContainer": { - "type": "string", - "description": "The image used for the initContainer of the model service deployment, if blank it defaults to 'quay.io/redhat-ai-dev/granite-7b-lab:latest'.", - "default": "quay.io/redhat-ai-dev/granite-7b-lab:latest" + "gitSecretName": { + "default": "github-secrets", + "description": "The name of the Secret containing the required Github token.", + "title": "Secret Name", + "type": "string" }, - "modelInitCommand": { - "type": "string", - "description": "The model service initContainer command, if blank it defaults to ['/usr/bin/install', '/model/model.file', '/shared/'].", - "default": "['/usr/bin/install', '/model/model.file', '/shared/']" + "gitSourceRepo": { + "default": "redhat-ai-dev/ai-lab-samples", + "description": "The Github Repository with the contents of the ai-lab sample chatbot application. 
It must be either the 'redhat-ai-dev/ai-lab-samples' or its fork.", + "title": "Git Source Repo", + "type": "string" }, - "modelPath": { - "type": "string", - "description": "The path of the model file inside the model service container, if blank it defaults to '/model/model.file'.", - "default": "/model/model.file" + "githubOrgName": { + "description": "[REQUIRED] The Github Organization name that the chatbot application repository will be created into.", + "title": "GitHub Org Name", + "type": "string" + }, + "quayAccountName": { + "description": "[REQUIRED] The quay.io account that the application image will be pushed.", + "title": "Quay Account Name", + "type": "string" } - } + }, + "required": [ + "quayAccountName", + "githubOrgName" + ], + "type": "object" }, - "gitops": { - "type": "object", + "model": { "properties": { - "gitSecretName": { - "type": "string", - "description": "The name of the Secret containing the required Github token. If blank it defaults to 'github-secrets'.", - "default": "github-secrets" - }, - "gitSecretKeyToken": { - "type": "string", - "description": "The name of the Secret's key with the Github token value. If blank it defaults to 'password'.", - "default": "password" + "initContainer": { + "default": "quay.io/redhat-ai-dev/granite-7b-lab:latest", + "description": "The image used for the initContainer of the model service deployment.", + "title": "Init Container", + "type": "string" }, - "githubOrgName": { - "type": "string", - "description": "[REQUIRED] The Github Organization name that the chatbot application repository will be created into." + "modelInitCommand": { + "default": "['/usr/bin/install', '/model/model.file', '/shared/']", + "description": "The model service initContainer command.", + "title": "Init Command", + "type": "string" }, - "gitSourceRepo": { - "type": "string", - "description": "The Github Repository with the contents of the ai-lab sample chatbot application. 
It must be either the 'redhat-ai-dev/ai-lab-samples' or its fork. If blank it defaults to 'redhat-ai-dev/ai-lab-samples'.", - "default": "redhat-ai-dev/ai-lab-samples" + "modelPath": { + "default": "/model/model.file", + "description": "The path of the model file inside the model service container.", + "title": "Path", + "type": "string" }, - "gitDefaultBranch": { - "type": "string", - "description": "The default branch for the chatbot application Github repository. If blank it defaults to 'main'.", - "default": "main" + "modelServiceContainer": { + "default": "quay.io/ai-lab/llamacpp_python:latest", + "description": "The image used for the model service.", + "title": "Service Container", + "type": "string" }, - "quayAccountName": { - "type": "string", - "description": "[REQUIRED] The quay.io account that the application image will be pushed." + "modelServicePort": { + "default": 8001, + "description": "The exposed port of the model service.", + "title": "Service Port", + "type": "integer" } - } + }, + "type": "object" } - } -} - + }, + "title": "Chatbot AI Sample Helm Chart", + "type": "object" +} \ No newline at end of file diff --git a/charts/ai-software-templates/chatbot/values.schema.tmpl.json b/charts/ai-software-templates/chatbot/values.schema.tmpl.json new file mode 100644 index 0000000..edc1dd1 --- /dev/null +++ b/charts/ai-software-templates/chatbot/values.schema.tmpl.json @@ -0,0 +1,99 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "title": "Chatbot AI Sample Helm Chart", + "properties": { + "application": { + "type": "object", + "properties": { + "appPort": { + "title": "App Port", + "type": "integer", + "description": "The exposed port of the application.", + "default": 8501 + }, + "appContainer": { + "title": "App Container", + "type": "string", + "description": "The image used for the initial chatbot application interface.", + "default": "quay.io/redhat-ai-dev/chatbot:latest" + } + } + }, + "model": { + 
"type": "object", + "properties": { + "modelServicePort": { + "title": "Service Port", + "type": "integer", + "description": "The exposed port of the model service.", + "default": 8001 + }, + "modelServiceContainer": { + "title": "Service Container", + "type": "string", + "description": "The image used for the model service.", + "default": "quay.io/ai-lab/llamacpp_python:latest" + }, + "initContainer": { + "title": "Init Container", + "type": "string", + "description": "The image used for the initContainer of the model service deployment.", + "default": "quay.io/redhat-ai-dev/granite-7b-lab:latest" + }, + "modelInitCommand": { + "title": "Init Command", + "type": "string", + "description": "The model service initContainer command.", + "default": "['/usr/bin/install', '/model/model.file', '/shared/']" + }, + "modelPath": { + "title": "Path", + "type": "string", + "description": "The path of the model file inside the model service container.", + "default": "/model/model.file" + } + } + }, + "gitops": { + "type": "object", + "properties": { + "gitSecretName": { + "title": "Secret Name", + "type": "string", + "description": "The name of the Secret containing the required Github token.", + "default": "github-secrets" + }, + "gitSecretKeyToken": { + "title": "Secret Key Token", + "type": "string", + "description": "The name of the Secret's key with the Github token value.", + "default": "GITHUB_TOKEN" + }, + "githubOrgName": { + "title": "GitHub Org Name", + "type": "string", + "description": "[REQUIRED] The Github Organization name that the chatbot application repository will be created into." + }, + "gitSourceRepo": { + "title": "Git Source Repo", + "type": "string", + "description": "The Github Repository with the contents of the ai-lab sample chatbot application. 
It must be either the 'redhat-ai-dev/ai-lab-samples' or its fork.", + "default": "redhat-ai-dev/ai-lab-samples" + }, + "gitDefaultBranch": { + "title": "Git Default Branch", + "type": "string", + "description": "The default branch for the chatbot application Github repository.", + "default": "main" + }, + "quayAccountName": { + "title": "Quay Account Name", + "type": "string", + "description": "[REQUIRED] The quay.io account that the application image will be pushed to." + } + }, + "required": ["quayAccountName", "githubOrgName"] + } + } +} diff --git a/charts/ai-software-templates/chatbot/values.yaml b/charts/ai-software-templates/chatbot/values.yaml index fc2db1b..23ba634 100644 --- a/charts/ai-software-templates/chatbot/values.yaml +++ b/charts/ai-software-templates/chatbot/values.yaml @@ -1,19 +1,31 @@ - application: + # -- The exposed port of the application appPort: 8501 + # -- The image used for the initial chatbot application interface appContainer: "quay.io/redhat-ai-dev/chatbot:latest" model: + # -- The image used for the initContainer of the model service deployment initContainer: "quay.io/redhat-ai-dev/granite-7b-lab:latest" + # -- The model service initContainer command modelInitCommand: "['/usr/bin/install', '/model/model.file', '/shared/']" + # -- The path of the model file inside the model service container modelPath: "/model/model.file" + # -- The image used for the model service modelServiceContainer: "quay.io/ai-lab/llamacpp_python:latest" + # -- The exposed port of the model service modelServicePort: 8001 gitops: + # -- The name of the Secret containing the required Github token gitSecretName: "github-secrets" - gitSecretKeyToken: "password" + # -- The name of the Secret's key with the Github token value + gitSecretKeyToken: "GITHUB_TOKEN" + # -- The Github Repository with the contents of the ai-lab sample chatbot application gitSourceRepo: "redhat-ai-dev/ai-lab-samples" + # -- The default branch for the chatbot application Github repository 
gitDefaultBranch: "main" + # -- [REQUIRED] The Github Organization name that the chatbot application repository will be created in githubOrgName: "" - quayAccountName: "" \ No newline at end of file + # -- [REQUIRED] The quay.io account that the application image will be pushed to + quayAccountName: "" diff --git a/charts/ai-software-templates/pipeline-install/README.md b/charts/ai-software-templates/pipeline-install/README.md index 2b0915d..124175e 100644 --- a/charts/ai-software-templates/pipeline-install/README.md +++ b/charts/ai-software-templates/pipeline-install/README.md @@ -1,50 +1,19 @@ -# OpenShift Pipelines Installation / Configuration for Devtools AI Sample Applications. -This repo is a Helm chart that a user with Cluster Admin level privileges would run to set up OpenShift Pipelines for the Devtools AI sample applications. For more information about helm charts see the official [Helm Charts Documentation](https://helm.sh/). +# ops-ai-labs-install +![Version: 0.1.0](https://img.shields.io/badge/Version-0.1.0-informational?style=flat-square) -## Requirements - -- You have sufficient permissions to install operators on an OCP cluster. -- You have sufficient permissions to create a namespace whose name starts with "openshift-". -- You have sufficient permissions to create RBAC and ServiceAccounts in that namespace. - -## Background - -The chart first installs the [Openshift Pipelines Operator](https://www.redhat.com/en/technologies/cloud-computing/openshift/pipelines) and once Pipelines is up and running, ensures the Tekton Pipelines features needed by the sample AI applications are turned on. - -Lastly, it will set up the Pipelines As Code component of OpenShift Pipelines with the credentials you provide in your `values.yaml` file that allow for interaction with your GitHub Application and the events it submits when you interact with your application's -GitHub repository. 
- -## Installation - -The helm chart can be directly installed from the OpenShift Dev Console. Check [here](https://docs.redhat.com/en/documentation/openshift_container_platform/4.8/html/building_applications/working-with-helm-charts#understanding-helm) for more information. +A Helm chart for installing the OpenShift Pipelines Operator, ensuring requisite features for our samples are enabled, as well as providing GitHub Application credentials to Pipelines As Code. -### Install using Helm - -To install the Pipeline Install Helm chart using Helm directly, you can run: - -``` -helm upgrade --install --namespace openshift-pipelines --create-namespace . -``` - -The `.gitignore` file in this repository filters files named `private-values.yaml`. Thus, you can maintain in -your local fork of this repository a value settings file outside of git management. Copy `values.yaml` in this directory to `private-values.yaml` and make any necessary edits to `private-values.yaml`. Then change your helm invocation to the following: +## Requirements -```shell -helm upgrade --install --namespace openshift-pipelines --create-namespace -f ./private-values.yaml . -``` +Kubernetes: `>= 1.27.0-0` ## Values -Below is a table of each value used to configure this chart. Note: - -- Your helm release's name will be used to as a prefix for the `Subscription` instance used to initiate the install of OpenShift Pipelines. - -### Tekton +| Key | Type | Default | Description | +|-----|------|---------|-------------| +| tekton.gitAppId | string | `""` | | +| tekton.gitAppWebhookSecret | string | `""` | | -| Value | Description | Default | Additional Information | -|------------------------------|--------------------------------------------------------------|---------| ---------------------- | -| `tekton.gitAppId` | `[REQUIRED]` The ID of the GitHub Application. | | | -| `tekton.gitAppWebhookSecret` | `[REQUIRED]` The GitHub Application Webhook secret value. 
| | | -| `tekton.gitAppPrivateKey` | `[REQUIRED]` The Github Application multi-lined private key. | | | diff --git a/charts/ai-software-templates/pipeline-setup/README.md b/charts/ai-software-templates/pipeline-setup/README.md index a9c62db..1ce5429 100644 --- a/charts/ai-software-templates/pipeline-setup/README.md +++ b/charts/ai-software-templates/pipeline-setup/README.md @@ -1,51 +1,21 @@ -# OpenShift Pipelines Application Namespace setup for Devtools AI Sample Applications. -This repo is a Helm chart that a user admin level privileges to his application namespace would run to set up OpenShift Pipelines for the Devtools AI sample applications. For more information about helm charts see the official [Helm Charts Documentation](https://helm.sh/). +# ops-ai-labs-setup +![Version: 0.1.0](https://img.shields.io/badge/Version-0.1.0-informational?style=flat-square) -## Requirements - -- You have sufficient permissions to create a namespace. -- You have sufficient permissions to create RBAC and ServiceAccounts in your namespace. - -## Background - -This chart assumes the [OpenShift Pipelines install chart](../pipeline-install/README.md) has been run, something equivalent to what it does, so -that OpenShift Pipelines can run the Tekton Pipelines under the [rhdh-pipelines](https://github.com/redhat-ai-dev/rhdh-pipelines) repo, and that -Pipelines As Code has sufficient credentials to process events from you GitHub Application. - -This chart will then set up the Quay and Git credentials in Secrets so that the Tekton Pipelines can interact with your -applications GitHub repository and push you versions of your application's image to your Quay repository. - -## Installation +A Helm chart for integrating the Git and Quay configuration for Pipelines in the user's application namespace. -The helm chart can be directly installed from the OpenShift Dev Console. 
Check [here](https://docs.redhat.com/en/documentation/openshift_container_platform/4.8/html/building_applications/working-with-helm-charts#understanding-helm) for more information. - -### Install using Helm - -To install the Pipelines Setup Helm chart using Helm directly, you can run: - -``` -helm upgrade --install --namespace --create-namespace . -``` - -The `.gitignore` file in this repository filters files named `private-values.yaml`. Thus, you can maintain in -your local fork of this repository a value settings file outside of git management. Copy `values.yaml` in this directory to `private-values.yaml` and make any necessary edits to `private-values.yaml`. Then change your helm invocation to the following: +## Requirements -```shell -helm upgrade --install --namespace --create-namespace -f ./private-values.yaml . -``` +Kubernetes: `>= 1.27.0-0` ## Values -Below is a table of each value used to configure this chart. - -### Tekton +| Key | Type | Default | Description | +|-----|------|---------|-------------| +| tekton.gitSecretKeyToken | string | `"password"` | | +| tekton.gitSecretName | string | `"github-secrets"` | | +| tekton.gitSecretToken | string | `""` | | +| tekton.quayConfigJSON | string | `"< your config.json file contents, ideally in compact, single line format>"` | | +| tekton.quaySecretName | string | `"ai-lab-image-registry-token"` | | -| Value | Description | Default | Additional Information | -|----------------------------|-----------------------------------------------------------------------------------------|-------------------------------| ---------------------- | -| `tekton.quayConfigJSON` | `[REQUIRED]` The quay.io config.json used when the application image will be pushed. | | | -| `tekton.quaySecretName` | The name of the Secret containing the required Quay token. | 'ai-lab-image-registry-token' | | -| `tekton.gitSecretName` | The name of the Secret containing the required Github token. 
| 'github-secrets' | | -| `tekton.gitSecretKeyToken` | The name of the Secret's key with the Github token value. | 'password' | | -| `tekton.gitSecretToken` | `[REQUIRED]` The Github token value. | | |