Skip to content

Commit

Permalink
Update package versions (#20)
Browse files Browse the repository at this point in the history
* debug es

* remove gp dep
* do not build greenplum

* add pgvectorscale

* update kafka ver

* update dockerfiles
  • Loading branch information
haobibo authored Aug 25, 2024
1 parent 5ad3e7d commit a1480f9
Show file tree
Hide file tree
Showing 9 changed files with 62 additions and 56 deletions.
25 changes: 6 additions & 19 deletions .github/workflows/build-docker.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,13 +15,12 @@ on:
workflow_dispatch:

env:
REGISTRY_URL: "docker.io" # docker.io or other registry URL, DOCKER_REGISTRY_USERNAME/DOCKER_REGISTRY_PASSWORD to be set in CI env.
BUILDKIT_PROGRESS: "plain" # Full logs for CI build.

REGISTRY_URL: "docker.io" # docker.io or other target registry URL: where to push images to.
REGISTRY_SRC: "docker.io" # For BASE_NAMESPACE of images: where to pull base images from.
# DOCKER_REGISTRY_USERNAME and DOCKER_REGISTRY_PASSWORD is required for docker image push, they should be set in CI secrets.
DOCKER_REGISTRY_USERNAME: ${{ secrets.DOCKER_REGISTRY_USERNAME }}
DOCKER_REGISTRY_PASSWORD: ${{ secrets.DOCKER_REGISTRY_PASSWORD }}

# used to sync image to mirror registry
DOCKER_MIRROR_REGISTRY_USERNAME: ${{ secrets.DOCKER_MIRROR_REGISTRY_USERNAME }}
DOCKER_MIRROR_REGISTRY_PASSWORD: ${{ secrets.DOCKER_MIRROR_REGISTRY_PASSWORD }}
Expand Down Expand Up @@ -54,15 +53,6 @@ jobs:
source ./tool.sh
build_image kafka latest docker_kafka_confluent/Dockerfile && push_image
qpod_greenplum:
name: "greenplum"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- run: |
source ./tool.sh
build_image greenplum latest docker_greenplum/Dockerfile && push_image
qpod_postgres:
name: "postgres-16-ext,postgres-15-ext"
runs-on: ubuntu-latest
Expand All @@ -76,15 +66,12 @@ jobs:
## Sync all images in this build (listed by "names") to mirror registry.
sync_images:
needs: ["qpod_bigdata", "qpod_elasticsearch", "qpod_kafka_confluent", "qpod_postgres", "qpod_greenplum"]
needs: ["qpod_bigdata", "qpod_elasticsearch", "qpod_kafka_confluent", "qpod_postgres"]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- run: |
source ./tool.sh
printenv > /tmp/docker.env
docker run --rm \
--env-file /tmp/docker.env \
-v $(pwd):/tmp \
-w /tmp \
qpod/docker-kit /opt/conda/bin/python /opt/utils/image-syncer/run_jobs.py
printenv | grep -v 'PATH' > /tmp/docker.env
docker run --rm --env-file /tmp/docker.env -v $(pwd):/tmp -w /tmp \
${IMG_NAMESPACE:-qpod}/docker-kit python /opt/utils/image-syncer/run_jobs.py
5 changes: 3 additions & 2 deletions docker_bigdata/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -12,10 +12,11 @@ ENV CONDA_PREFIX=/opt/conda
ENV JAVA_HOME=/opt/jdk
ENV PATH=$PATH:${CONDA_PREFIX}/bin:${JAVA_HOME}/bin

RUN source /opt/utils/script-setup.sh \
RUN set -eux \
&& source /opt/utils/script-setup.sh \
&& source /opt/utils/script-setup-db-clients.sh \
&& install_apt /opt/utils/install_list_base.apt \
&& echo "Install tini" && setup_tini \
# && echo "Install tini" && setup_tini \
&& echo "Install postgresql client:" && setup_postgresql_client \
&& echo "Install mysql client:" && setup_mysql_client \
&& echo "Install mongosh:" && setup_mongosh_client \
Expand Down
30 changes: 15 additions & 15 deletions docker_elasticsearch/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,11 +1,11 @@
ARG BASE_NAMESPACE
ARG BASE_IMG="base"
ARG BASE_IMG="atom"
FROM ${BASE_NAMESPACE:+$BASE_NAMESPACE/}${BASE_IMG} as builder

ARG ES_VERSION="7.11.2"
ARG ES_VERSION="8.14.1"


RUN source /opt/utils/script-utils.sh \
RUN set -eux && source /opt/utils/script-utils.sh \
&& ES_SOURCECODE="https://github.com/elastic/elasticsearch/archive/v${ES_VERSION}.tar.gz" \
&& ES_ARTIFACT="https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-${ES_VERSION}-linux-x86_64.tar.gz" \
&& install_tar_gz ${ES_SOURCECODE} && mv /opt/elasticsearch-* /tmp/elasticsearch \
Expand All @@ -17,14 +17,14 @@ RUN source /opt/utils/script-utils.sh \
&& sed -i -e 's/ES_DISTRIBUTION_TYPE=tar/ES_DISTRIBUTION_TYPE=docker/' /opt/elasticsearch/bin/elasticsearch-env \
&& mkdir -pv config/jvm.options.d

RUN set -ex \
RUN set -eux \
&& mkdir -pv /tmp/build/src && cd /tmp/build \
&& ln -s /opt/elasticsearch/lib /tmp/build/ \
&& ln -s /opt/elasticsearch/modules /tmp/build/ \
&& find /tmp/elasticsearch -name "License.java" | xargs -r -I {} cp {} . \
&& sed -i 's#this.type = type;#this.type = "platinum";#g' License.java \
&& sed -i 's#validate();#// validate();#g' License.java \
&& javac -cp "`ls lib/elasticsearch-${ES_VERSION}.jar`:`ls lib/elasticsearch-x-content-*.jar`:`ls lib/lucene-core-*.jar`:`ls modules/x-pack-core/x-pack-core-*.jar`" License.java \
&& javac -cp "`ls lib/elasticsearch-${ES_VERSION}.jar`:`ls lib/elasticsearch-core-${ES_VERSION}.jar`:`ls lib/elasticsearch-x-content-*.jar`:`ls lib/lucene-core-*.jar`:`ls modules/x-pack-core/x-pack-core-*.jar`" License.java \
&& cd /tmp/build/src \
&& find /opt/elasticsearch/ -name "x-pack-core-*.jar" | xargs -r -I {} cp {} . \
&& jar xf x-pack-core-${ES_VERSION}.jar \
Expand All @@ -33,24 +33,24 @@ RUN set -ex \
&& cp -r /tmp/build/src/x-pack-core-*.jar /opt/elasticsearch/modules/x-pack-core/ \
&& rm -rf /tmp/*

COPY --chown=1000:0 docker-entrypoint.sh /opt/elasticsearch/docker-entrypoint.sh
COPY --chown=1000:0 elasticsearch.yml /opt/elasticsearch/config/
COPY --chown=1000:0 log4j2.properties /opt/elasticsearch/config/
COPY --chown=1001:0 docker-entrypoint.sh /opt/elasticsearch/docker-entrypoint.sh
COPY --chown=1001:0 elasticsearch.yml /opt/elasticsearch/config/
COPY --chown=1001:0 log4j2.properties /opt/elasticsearch/config/

# Second build stage
FROM ${BASE_NAMESPACE:+$BASE_NAMESPACE/}${BASE_IMG}

LABEL maintainer="[email protected]"

COPY --from=builder /opt/jdk /opt/jdk
COPY --from=builder --chown=1000:0 /opt/elasticsearch /opt/elasticsearch
COPY --from=builder --chown=1001:0 /opt/elasticsearch /opt/elasticsearch

RUN source /opt/utils/script-setup.sh \
&& setup_tini \
RUN set -eux && source /opt/utils/script-setup.sh \
# && setup_tini \
&& ln -s /opt/jdk/bin/* /usr/bin/ \
&& ln -s /opt/elasticsearch /usr/share/ \
&& groupadd --gid 1000 elasticsearch \
&& adduser --system -q --uid 1000 --gid 1000 --gid 0 --home /opt/elasticsearch elasticsearch \
&& groupadd --gid 1001 elasticsearch \
&& adduser --system -q --uid 1001 --gid 1001 --gid 0 --home /opt/elasticsearch elasticsearch \
&& chmod -R 0775 /opt/elasticsearch && chgrp 0 /opt/elasticsearch \
&& chmod g=u /etc/passwd \
&& find / -xdev -perm -4000 -exec chmod ug-s {} + \
Expand All @@ -59,7 +59,7 @@ RUN source /opt/utils/script-setup.sh \
&& echo "elastic" | bin/elasticsearch-keystore add -xf bootstrap.password \
&& echo "" | bin/elasticsearch-certutil ca -s --out elastic-stack-ca.p12 --pass \
&& echo "" | bin/elasticsearch-certutil cert -s --ca elastic-stack-ca.p12 --ca-pass "" --out elastic-certificates.p12 --pass \
&& mv elastic-*.p12 config/ && chown 1000:0 config/elastic-*.p12 config/elasticsearch.keystore \
&& mv elastic-*.p12 config/ && chown 1001:0 config/elastic-*.p12 config/elasticsearch.keystore \
&& echo "vm.swappiness=0" >> /etc/sysctl.conf \
&& echo "vm.max_map_count=655360 " >> /etc/sysctl.conf \
&& echo "* soft memlock unlimited" >> /etc/security/limits.conf \
Expand All @@ -70,6 +70,6 @@ EXPOSE 9200 9300
WORKDIR /opt/elasticsearch
VOLUME /data/elasticsearch

ENTRYPOINT ["tini", "--", "/opt/elasticsearch/docker-entrypoint.sh"]
ENTRYPOINT ["/opt/elasticsearch/docker-entrypoint.sh"]

CMD ["eswrapper"]
7 changes: 4 additions & 3 deletions docker_greenplum/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,10 @@ COPY rootfs /
RUN set -x && source /opt/utils/script-utils.sh \
&& install_apt /opt/utils/install_list_greenplum.apt \
&& apt-get -qq install -yq --no-install-recommends gcc g++ bison flex cmake pkg-config ccache ninja-build \
&& VERSION_GPDB_RELEASE=$(curl -sL https://github.com/greenplum-db/gpdb/releases.atom | grep 'releases/tag' | grep "/7." | head -1 | grep -Po '\d[\d.]+' ) \
&& URL_GBDP="https://github.com/greenplum-db/gpdb/archive/refs/tags/${VERSION_GPDB_RELEASE}.tar.gz" \
&& echo "Downloading GBDP src release ${VERSION_GPDB_RELEASE} from: ${URL_GBDP}" \
# && VERSION_GPDB_RELEASE=$(curl -sL https://github.com/greenplum-db/gpdb/releases.atom | grep 'releases/tag' | grep "/7." | head -1 | grep -Po '\d[\d.]+' ) \
# && URL_GBDP="https://github.com/greenplum-db/gpdb/archive/refs/tags/${VERSION_GPDB_RELEASE}.tar.gz" \
&& URL_GBDP="https://github.com/greenplum-db/gpdb-archive/archive/refs/heads/main.tar.gz" \
&& echo "Downloading GBDP src release ${VERSION_GPDB_RELEASE:-archive} from: ${URL_GBDP}" \
&& install_tar_gz $URL_GBDP && mv /opt/gpdb-* /opt/gpdb_src \
&& cd /opt/gpdb_src \
&& PYTHON=/opt/conda/bin/python3 ./configure --prefix=/opt/gpdb --with-perl --with-python --with-libxml --with-gssapi --with-openssl \
Expand Down
4 changes: 2 additions & 2 deletions docker_kafka_confluent/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ ARG BASE_NAMESPACE
ARG BASE_IMG="jdk-11"
FROM ${BASE_NAMESPACE:+$BASE_NAMESPACE/}${BASE_IMG}

ARG KAFKA_VERSION="7.3.3"
ARG KAFKA_VERSION="7.7.0"

LABEL maintainer="[email protected]"

Expand All @@ -16,7 +16,7 @@ ENV COMPONENT=kafka \
KAFKA_VERSION="${KAFKA_VERSION}" \
KAFKA_HOME=/opt/kafka

RUN source /opt/utils/script-confluent-kafka.sh \
RUN set -eux && source /opt/utils/script-confluent-kafka.sh \
&& echo "Install confluent-kafka:" && setup_confluent_kafka \
&& echo "Setup confluent Kafka to run in KRaft mode" && setup_confluent_kafka_kraft \
&& echo "Clean up" && list_installed_packages && install__clean
Expand Down
19 changes: 11 additions & 8 deletions docker_kafka_confluent/work/script-confluent-kafka.sh
Original file line number Diff line number Diff line change
Expand Up @@ -3,30 +3,33 @@ source /opt/utils/script-utils.sh
setup_confluent_kafka() {
export KAFKA_HOME=/opt/kafka

local VER_C_KAFKA_MINOR=${KAFKA_VERSION:-"7.3.3"}
local VER_C_KAFKA_MINOR=${KAFKA_VERSION:-"7.7.0"}
local VER_C_KAFKA_MAJOR=${VER_C_KAFKA_MINOR%.*}
local URL_C_KAFKA="http://packages.confluent.io/archive/${VER_C_KAFKA_MAJOR}/confluent-community-${VER_C_KAFKA_MINOR}.tar.gz"

# Download CKafka package and unzip to /opt/kafka
# ref: https://docs.confluent.io/platform/current/installation/installing_cp/zip-tar.html#get-the-software
install_tar_gz "${URL_C_KAFKA}" && mv /opt/confluent-* ${KAFKA_HOME} \
## Download CKafka package and unzip to /opt/kafka
## ref: https://docs.confluent.io/platform/current/installation/installing_cp/zip-tar.html#get-the-software
echo "Downloading Kafka ${VER_C_KAFKA_MINOR} from: ${URL_C_KAFKA}" \
&& install_tar_gz "${URL_C_KAFKA}" && mv /opt/confluent-* ${KAFKA_HOME} \
&& echo "Setting up kafka dirs:" && mkdir -pv /var/lib/kafka/data /etc/kafka/secrets \
&& ln -sf ${KAFKA_HOME}/etc /etc/confluent \
&& ls -alh ${KAFKA_HOME}/*

# CKafka docker images requires confluent docker utils for dub/cub command
pip install -U confluent-kafka https://github.com/confluentinc/confluent-docker-utils/archive/refs/heads/master.zip \
pip install -U confluent-kafka

## CKafka docker images requires confluent docker utils for dub/cub command
pip install https://github.com/confluentinc/confluent-docker-utils/archive/refs/heads/master.zip \
&& install_zip https://github.com/confluentinc/confluent-docker-utils/archive/refs/heads/master.zip \
&& PYTHON_SITE=$(python3 -c 'import sys;print(list(filter(lambda s: "site" in s, sys.path))[0])') \
&& cp -rf /opt/confluent-*/confluent ${PYTHON_SITE} \
&& rm -rf /opt/confluent-*

install_zip https://github.com/confluentinc/common-docker/archive/refs/heads/master.zip \
install_zip https://github.com/confluentinc/common-docker/archive/refs/heads/master.zip \
&& mv /opt/common-docker-master ${KAFKA_HOME}/common-docker \
&& mkdir -pv ${KAFKA_HOME}/etc/docker/ \
&& cp -rf ${KAFKA_HOME}/common-docker/base/include/etc/confluent/docker/* ${KAFKA_HOME}/etc/docker/

# CKafka base docker images are built with some scripts included
## CKafka base docker images are built with some scripts included
install_zip https://github.com/confluentinc/kafka-images/archive/refs/heads/master.zip
mv /opt/kafka-images* ${KAFKA_HOME}/kafka-images
cp -rf "${KAFKA_HOME}/kafka-images/kafka/include/etc/confluent/docker/" "${KAFKA_HOME}/etc/"
Expand Down
5 changes: 3 additions & 2 deletions docker_postgres/postgres-ext.Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -8,15 +8,16 @@ LABEL maintainer="[email protected]"

COPY rootfs /

RUN set -x && . /opt/utils/script-utils.sh && . /opt/utils/script-setup-pg_ext_mirror.sh \
RUN set -eux && . /opt/utils/script-utils.sh && . /opt/utils/script-setup-pg_ext_mirror.sh \
## Generate a package list based on PG_MAJOR version
&& apt-get update && apt-get install -y gettext \
&& envsubst < /opt/utils/install_list_pgext.tpl.apt > /opt/utils/install_list_pgext.apt \
&& rm -rf /opt/utils/install_list_pgext.tpl.apt \
## Install extensions
&& . /opt/utils/script-setup-pg_ext.sh \
&& PYTHON_VERSION=$(python -c 'from sys import version_info as v; print("%s.%s" % (v.major, v.minor))') \
## Hack: fix system python / conda python
&& cp -rf /opt/conda/lib/python3.11/platform.py.bak /opt/conda/lib/python3.11/platform.py \
&& cp -rf "/opt/conda/lib/python${PYTHON_VERSION}/platform.py.bak" "/opt/conda/lib/python${PYTHON_VERSION}/platform.py" \
&& echo "Clean up" && list_installed_packages && install__clean

USER postgres
Expand Down
12 changes: 12 additions & 0 deletions docker_postgres/rootfs/opt/utils/script-setup-pg_ext.sh
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,18 @@ setup_apache_age() {
setup_apache_age


setup_pgvectorscale() {
## ref: https://github.com/timescale/pgvectorscale
ARCH="amd64" \
&& VER_PGVS=$(curl -sL https://github.com/timescale/pgvectorscale/releases.atom | grep 'releases/tag' | head -1 | grep -Po '\d[\d.]+' ) \
&& URL_PGVS="https://github.com/timescale/pgvectorscale/releases/download/${VER_PGVS}/pgvectorscale-${VER_PGVS}-pg${PG_MAJOR}-${ARCH}.zip" \
&& mkdir -pv /tmp/pgvectorscale/ && cd /tmp/pgvectorscale \
&& install_zip ${URL_PGVS} && mv /opt/pgvectorscal* /tmp/pgvectorscale/ \
&& dpkg -i *.deb
}
setup_pgvectorscale


setup_pgroonga(){
## ref1: https://pgroonga.github.io/tutorial/
## ref2: https://github.com/pgroonga/docker
Expand Down
11 changes: 6 additions & 5 deletions tool.sh
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
#!/bin/bash
set -xu

CI_PROJECT_NAME=${GITHUB_REPOSITORY}
CI_PROJECT_NAME=${GITHUB_REPOSITORY:-"QPod/lab-data"}
CI_PROJECT_BRANCH=${GITHUB_HEAD_REF:-"main"}
CI_PROJECT_SPACE=$(echo "${CI_PROJECT_BRANCH}" | cut -f1 -d'/')

Expand All @@ -15,11 +15,12 @@ fi

export IMG_NAMESPACE=$(echo "${CI_PROJECT_NAMESPACE}" | awk '{print tolower($0)}')
export IMG_PREFIX=$(echo "${REGISTRY_URL:-"docker.io"}/${IMG_NAMESPACE}" | awk '{print tolower($0)}')
export TAG_SUFFIX="-$(git rev-parse --short HEAD)"

echo "--------> CI_PROJECT_NAMESPACE=${CI_PROJECT_NAMESPACE}"
echo "--------> DOCKER_IMG_NAMESPACE=${IMG_NAMESPACE}"
echo "--------> DOCKER_IMG_PREFIX=${IMG_PREFIX}"

echo "--------> DOCKER_TAG_SUFFIX=${TAG_SUFFIX}"

if [ -f /etc/docker/daemon.json ]; then
jq '.experimental=true | ."data-root"="/mnt/docker"' /etc/docker/daemon.json > /tmp/daemon.json && sudo mv /tmp/daemon.json /etc/docker/ \
Expand All @@ -30,7 +31,7 @@ docker info

build_image() {
echo "$@" ;
IMG=$1; TAG=$2; FILE=$3; shift 3; VER=$(date +%Y.%m%d.%H%M); WORKDIR="$(dirname $FILE)";
IMG=$1; TAG=$2; FILE=$3; shift 3; VER=$(date +%Y.%m%d.%H%M)${TAG_SUFFIX}; WORKDIR="$(dirname $FILE)";
docker build --compress --force-rm=true -t "${IMG_PREFIX}/${IMG}:${TAG}" -f "$FILE" --build-arg "BASE_NAMESPACE=${IMG_PREFIX}" "$@" "${WORKDIR}" ;
docker tag "${IMG_PREFIX}/${IMG}:${TAG}" "${IMG_PREFIX}/${IMG}:${VER}" ;
}
Expand All @@ -43,13 +44,13 @@ build_image_no_tag() {

build_image_common() {
echo "$@" ;
IMG=$1; TAG=$2; FILE=$3; shift 3; VER=$(date +%Y.%m%d.%H%M); WORKDIR="$(dirname $FILE)";
IMG=$1; TAG=$2; FILE=$3; shift 3; VER=$(date +%Y.%m%d.%H%M)${TAG_SUFFIX}; WORKDIR="$(dirname $FILE)";
docker build --compress --force-rm=true -t "${IMG_PREFIX}/${IMG}:${TAG}" -f "$FILE" --build-arg "BASE_NAMESPACE=${IMG_PREFIX}" "$@" "${WORKDIR}" ;
docker tag "${IMG_PREFIX}/${IMG}:${TAG}" "${IMG_PREFIX}/${IMG}:${VER}" ;
}

alias_image() {
IMG_1=$1; TAG_1=$2; IMG_2=$3; TAG_2=$4; shift 4; VER=$(date +%Y.%m%d.%H%M);
IMG_1=$1; TAG_1=$2; IMG_2=$3; TAG_2=$4; shift 4; VER=$(date +%Y.%m%d.%H%M)${TAG_SUFFIX};
docker tag "${IMG_PREFIX}/${IMG_1}:${TAG_1}" "${IMG_PREFIX}/${IMG_2}:${TAG_2}" ;
docker tag "${IMG_PREFIX}/${IMG_2}:${TAG_2}" "${IMG_PREFIX}/${IMG_2}:${VER}" ;
}
Expand Down

0 comments on commit a1480f9

Please sign in to comment.