Make gRPC Service's BackupStatus return a node backup #2111
Workflow file for this run
name: Build, test and Release
on:
  push:
    branches:
      - master
      - '*.*'
  pull_request:
  release:
    types:
      - published
jobs:
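  # Unit tests: run the tox environment matching each supported Python version,
  # lint with flake8 and upload coverage to Codecov.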
  build:
    strategy:
      matrix:
        python-version: [3.7, 3.8, 3.9, "3.10", "3.11"]
        include:
          - python-version: 3.7
            tox-py: py37
          - python-version: 3.8
            tox-py: py38
          - python-version: 3.9
            tox-py: py39
          - python-version: "3.10"
            tox-py: py310
          - python-version: "3.11"
            tox-py: py311
    runs-on: ubuntu-20.04
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v1
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          python -m venv venv
          . venv/bin/activate
          python -m pip install --upgrade pip
          pip install -r requirements.txt
          pip install tox
      - name: Lint with flake8
        run: |
          pip install flake8
          # stop the build if there are Python syntax errors or undefined names
          flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
          # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
          flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics --ignore=W503
      - name: Run tox
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          . venv/bin/activate
          tox -e ${{ matrix.tox-py }}
      - uses: codecov/codecov-action@v1
        name: Report code coverage
  debian-build:
    # Build debian packages
    strategy:
      matrix:
        suite: [focal]
        include:
          - suite: focal
            os-version: ubuntu-20.04
      fail-fast: false
    runs-on: ${{ matrix.os-version }}
    steps:
      - uses: actions/checkout@v3
      - name: Build Debian Package
        run: |
          version=$(cat VERSION)
          version_exists=$(cat debian/changelog | grep "($version)" || echo "")
          if [ -z "$version_exists" ]
          then
            echo "Updating debian changelog..."
            cd packaging/docker-build
            docker-compose build release && docker-compose run release
            cd ../..
          fi
          cd packaging/docker-build
          docker-compose build "cassandra-medusa-builder-${{ matrix.suite }}" \
            && docker-compose run "cassandra-medusa-builder-${{ matrix.suite }}"
          cd ../../packages
          if [ -f "cassandra-medusa_${version}-0~${{ matrix.suite }}0_amd64.deb" ]; then
            echo "${{ matrix.suite }} debian package was successfully built"
          else
            echo "Error: no packages found for ${{ matrix.suite }}"
            exit 1
          fi
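      # Smoke test: install the freshly built .deb and check that the `medusa`
      # entry point runs.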
      - name: Install Debian Package and run Medusa
        run: |
          version=$(cat VERSION)
          cd packages
          sudo dpkg -i cassandra-medusa_${version}-0~${{ matrix.suite }}0_amd64.deb
          medusa
          exit $?
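  # Integration tests: run the run_integration_tests.sh suite against ccm-managed
  # Cassandra clusters, once per allowed storage backend / Cassandra version combination.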
  integration-tests:
    needs: build
    continue-on-error: ${{ matrix.experimental }}
    strategy:
      fail-fast: false
      matrix:
        python-version: [3.7, 3.8, 3.9, "3.10", "3.11"]
        it-backend: [local, s3, gcs, minio, azure, azure-hierarchical]
        # IBM not included by default due to lite plan quota being easily exceeded
        #it-backend: [local, s3, gcs, minio, ibm, azure]
        cassandra-version: [2.2.19, 3.11.11, 4.0.0, 'github:apache/trunk']
        include:
          # tweak the experimental flag for cassandra versions
          - cassandra-version: 2.2.19
            experimental: false
          - cassandra-version: 3.11.11
            experimental: false
          - cassandra-version: 4.0.0
            experimental: false
          # explicitly include tests against python 3.11 and one version of cassandra
          - python-version: "3.11"
            cassandra-version: 4.0.0
            it-backend: gcs
            experimental: false
          # explicitly include tests against python 3.10 and one version of cassandra
          - python-version: "3.10"
            cassandra-version: 4.0.0
            it-backend: gcs
            experimental: false
          # explicitly include tests against python 3.8 and one version of cassandra
          - python-version: 3.8
            cassandra-version: 4.0.0
            it-backend: gcs
            experimental: false
          # explicitly include tests against python 3.7 and one version of cassandra
          - python-version: 3.7
            cassandra-version: 4.0.0
            it-backend: gcs
            experimental: false
        exclude:
          # no tests against trunk
          - cassandra-version: 'github:apache/trunk'
          # fewer tests against cassandra 3.11.11 (exclude all but local storage backends)
          - it-backend: s3
            cassandra-version: "3.11.11"
          - it-backend: gcs
            cassandra-version: "3.11.11"
          - it-backend: minio
            cassandra-version: "3.11.11"
          - it-backend: azure
            cassandra-version: "3.11.11"
          - it-backend: azure-hierarchical
            cassandra-version: "3.11.11"
          # no tests against non-python3.9, except the explicitly allowed combinations
          - python-version: 3.7
          - python-version: 3.8
          - python-version: "3.10"
          - python-version: "3.11"
    runs-on: ubuntu-20.04
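    # A MinIO service container provides an S3-compatible endpoint on localhost:9000
    # for the `minio` backend tests.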
    services:
      minio:
        image: lazybit/minio
        ports:
          - 9000:9000
        env:
          MINIO_ACCESS_KEY: minio_key
          MINIO_SECRET_KEY: minio_secret
        volumes:
          - ${{ github.workspace }}/../data:/data
        options: --name=minio --health-cmd "curl http://localhost:9000/minio/health/live"
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v1
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup Java Action
        uses: actions/setup-java@v1
        with:
          java-version: '8.0.252'
          architecture: x64
      - name: Install dependencies
        run: |
          python -m venv venv
          . venv/bin/activate
          sudo apt-get remove azure-cli
          python -m pip install --upgrade pip
          pip install -r requirements.txt
          pip install -r requirements-test.txt
          pip install ccm
          case '${{ matrix.it-backend }}' in
            'azure'|'azure-hierarchical')
              echo "No extra requirements for now."
              ;;
            'ibm'|'minio'|'s3')
              echo "No extra requirements for now."
              ;;
          esac
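      # Cloud backends only run when their secret is available; GitHub does not expose
      # secrets to forked pull requests, so those combinations are skipped there.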
      - name: Check if integration tests can run
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.BUCKET_KEY }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.BUCKET_SECRET }}
        run: |
          if [[ ( -n "${AWS_ACCESS_KEY_ID}" && "${{ matrix.it-backend }}" == "s3" ) \
            || ( "${{ matrix.it-backend }}" == "local" ) \
            || ( "${{ matrix.it-backend }}" == "minio" ) \
            || ( -n '${{ secrets.MEDUSA_GCS_CREDENTIALS }}' && "${{ matrix.it-backend }}" == "gcs" ) \
            || ( -n '${{ secrets.MEDUSA_IBM_CREDENTIALS }}' && "${{ matrix.it-backend }}" == "ibm" ) \
            || ( -n '${{ secrets.MEDUSA_AZURE_CREDENTIALS }}' && "${{ matrix.it-backend }}" == "azure" ) \
            || ( -n '${{ secrets.MEDUSA_AZURE_HIERARCHICAL_CREDENTIALS }}' && "${{ matrix.it-backend }}" == "azure-hierarchical" ) ]];
          then
            echo "IT_CAN_RUN=yes" >> $GITHUB_ENV
          else
            echo "IT_CAN_RUN=no" >> $GITHUB_ENV
          fi
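      # AWS credentials from the repository secrets are exported for the S3 backend;
      # the IBM and MinIO branches below unset them so the wrong keys are not picked up.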
      - name: Run integration tests
        if: ${{ env.IT_CAN_RUN == 'yes' }}
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.BUCKET_KEY }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.BUCKET_SECRET }}
        run: |
          set -e
          . venv/bin/activate
          if [[ "${{ matrix.it-backend }}" == "ibm" ]];
          then
            # Prevent awscli from using AWS secrets in case we're running against IBM cloud
            unset AWS_ACCESS_KEY_ID
            unset AWS_SECRET_ACCESS_KEY
          fi
          # Create the ~/.aws directory that the IBM and MinIO credential files are written to
          mkdir ~/.aws
          # This fake cluster needs to be created first so that the integration tests pass in GH actions. Don't ask me why...
          ccm create test_cluster -v binary:3.11.4 -n 1 --vnodes
          ccm node1 updateconf 'storage_port: 7011'
          ccm node1 updateconf 'concurrent_reads: 4'
          ccm node1 updateconf 'concurrent_writes: 4'
          ccm node1 updateconf 'concurrent_counter_writes: 4'
          ccm node1 updateconf 'num_tokens: 4'
          sed -i 's/#MAX_HEAP_SIZE="4G"/MAX_HEAP_SIZE="256m"/' ~/.ccm/test_cluster/node1/conf/cassandra-env.sh
          sed -i 's/#HEAP_NEWSIZE="800M"/HEAP_NEWSIZE="200M"/' ~/.ccm/test_cluster/node1/conf/cassandra-env.sh
          ccm start -v
          ccm showlastlog | tail -100
          ccm stop
          # If we are dealing with C* 4.0 or 4.1, we need to fix the java-driver dependency:
          # it has a bug that breaks sstableloader tests with client encryption enabled.
          if [[ "${{ matrix.cassandra-version }}" =~ ^4\.[01]\. ]];
          then
            ccm create driver-fix-cluster -v ${{ matrix.cassandra-version }} -n 1
            # enclosed in () so we return to current pwd once we are done fixing the driver
            (
              cd ~/.ccm/repository/${{ matrix.cassandra-version }}/lib
              rm cassandra-driver-core-3.11.0-shaded.jar
              wget https://repo1.maven.org/maven2/com/datastax/cassandra/cassandra-driver-core/3.11.5/cassandra-driver-core-3.11.5-shaded.jar
            )
          fi
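          # Backend dispatch: write the backend-specific credentials where needed,
          # then call run_integration_tests.sh with the matching flag.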
if [ "${{ matrix.it-backend }}" == "s3" ] | |
then | |
# AWS S3 Storage tests | |
./run_integration_tests.sh -v --s3 --no-local --cassandra-version=${{ matrix.cassandra-version }} | |
elif [ "${{ matrix.it-backend }}" == "gcs" ] | |
then | |
# Google Cloud Storage tests | |
echo '${{ secrets.MEDUSA_GCS_CREDENTIALS }}' > ~/medusa_credentials.json | |
./run_integration_tests.sh -v --gcs --no-local --cassandra-version=${{ matrix.cassandra-version }} | |
elif [ "${{ matrix.it-backend }}" == "ibm" ] | |
then | |
# IBM Cloud Object Storage tests | |
printf "%s" '${{ secrets.MEDUSA_IBM_CREDENTIALS }}' > ~/.aws/ibm_credentials | |
./run_integration_tests.sh -v --ibm --no-local --cassandra-version=${{ matrix.cassandra-version }} | |
elif [ "${{ matrix.it-backend }}" == "minio" ] | |
then | |
# MinIO Object Storage tests | |
unset AWS_ACCESS_KEY_ID | |
unset AWS_SECRET_ACCESS_KEY | |
wget https://dl.min.io/client/mc/release/linux-amd64/mc | |
chmod +x mc | |
./mc alias set minio http://127.0.0.1:9000 minio_key minio_secret | |
./mc mb minio/medusa-dev | |
cp ./tests/resources/minio/minio_credentials ~/.aws/minio_credentials | |
./run_integration_tests.sh -v --minio --no-local --cassandra-version=${{ matrix.cassandra-version }} | |
elif [ "${{ matrix.it-backend }}" == "azure" ] | |
then | |
# Azure Blob Storage tests | |
printf "%s" '${{ secrets.MEDUSA_AZURE_CREDENTIALS }}' > ~/medusa_azure_credentials.json | |
./run_integration_tests.sh -v --azure --no-local --cassandra-version=${{ matrix.cassandra-version }} | |
elif [ "${{ matrix.it-backend }}" == "azure-hierarchical" ] | |
then | |
# Azure Blob Storage with hierarchical namespace tests | |
printf "%s" '${{ secrets.MEDUSA_AZURE_HIERARCHICAL_CREDENTIALS }}' > ~/medusa_azure_credentials.json | |
./run_integration_tests.sh -v --azure --no-local --cassandra-version=${{ matrix.cassandra-version }} | |
else | |
# Local storage tests | |
./run_integration_tests.sh -v --cassandra-version=${{ matrix.cassandra-version }} | |
fi | |
# Move and convert the coverage analysis file to XML | |
mv tests/integration/.coverage . | |
coverage xml | |
- uses: codecov/codecov-action@v1 | |
name: Report code coverage | |
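  # k8ssandra e2e test: build a temporary Medusa image from this branch and run the
  # selected k8ssandra-operator e2e test against it in a kind cluster.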
  k8ssandra-e2e-tests:
    needs: [build]
    runs-on: ubuntu-latest
    strategy:
      matrix:
        e2e_test:
          - CreateSingleMedusaJob
      fail-fast: false
    name: k8ssandra-${{ matrix.e2e_test }}
    env:
      CGO_ENABLED: 0
      KUBECONFIG_FILE: "./build/kind-kubeconfig"
      CONTROL_PLANE: kind-k8ssandra-0
      DATA_PLANES: kind-k8ssandra-0
    steps:
      - name: Raise open files limit
        run: |
          echo fs.file-max=1000000 | sudo tee -a /etc/sysctl.conf && sudo sysctl -p
          echo fs.inotify.max_user_instances=1280 | sudo tee -a /etc/sysctl.conf && sudo sysctl -p
          echo fs.inotify.max_user_watches=655360 | sudo tee -a /etc/sysctl.conf && sudo sysctl -p
          ulimit -a
      - name: Free diskspace by removing unused packages
        run: |
          sudo rm -rf /usr/local/lib/android
          sudo rm -rf /usr/share/dotnet
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1
      - name: Login to DockerHub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.K8SSANDRA_DOCKER_HUB_USERNAME }}
          password: ${{ secrets.K8SSANDRA_DOCKER_HUB_PASSWORD }}
      - uses: actions/checkout@v3
        with:
          path: cassandra-medusa
      - uses: actions/checkout@v3
        with:
          repository: k8ssandra/k8ssandra-operator
          # Modify this line to use a different branch when making changes to the operator affecting Medusa
          # Change to main once the operator changes are merged
          ref: add-medusa-non-regression-test
          path: k8ssandra-operator
      - name: Set up Go
        uses: actions/setup-go@v3
        with:
          go-version: '1.20'
          cache: true
          cache-dependency-path: "**/*.sum"
      - name: Install Kind
        working-directory: k8ssandra-operator
        # `go get` no longer installs binaries as of Go 1.18, so use `go install`
        run: go install sigs.k8s.io/kind@latest
      - name: Install kustomize
        uses: imranismail/setup-kustomize@v2
        with:
          kustomize-version: 4.x
      - name: Build Medusa
        working-directory: cassandra-medusa
        run: |
          echo "short_sha=$(git rev-parse --short HEAD)" >> $GITHUB_ENV
          python setup.py build
      - name: Build Medusa image
        id: docker_build
        uses: docker/build-push-action@v4
        with:
          file: cassandra-medusa/k8s/Dockerfile
          context: ./cassandra-medusa
          tags: k8ssandra/medusa:${{ env.short_sha }}-tmp
          platforms: linux/amd64
          push: true
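      # `single-create` is a k8ssandra-operator Makefile target which, as the name
      # suggests, provisions a single kind cluster for the e2e test.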
      - name: Setup kind cluster
        working-directory: k8ssandra-operator
        run: |
          make single-create
      - name: Run e2e test ( ${{ matrix.e2e_test }} )
        working-directory: k8ssandra-operator
        run: |
          e2e_test="TestOperator/${{ matrix.e2e_test }}"
          args="-kubeconfigFile '${{ env.KUBECONFIG_FILE }}'"
          args="$args -controlPlane '${{ env.CONTROL_PLANE }}'"
          args="$args -dataPlanes '${{ env.DATA_PLANES }}'"
          args="$args -medusaImageTag ${{ env.short_sha }}-tmp"
          make E2E_TEST="$e2e_test" TEST_ARGS="$args" e2e-test
      - name: Setup tmate session
        if: ${{ failure() }}
        uses: mxschmitt/action-tmate@v3
        timeout-minutes: 15
      - name: Get artefact upload directory
        working-directory: k8ssandra-operator
        if: ${{ failure() }}
        run: |
          uploaddir_name=$(echo ${{ matrix.e2e_test }} | sed 's/\//__/g')
          echo 'setting uploaddir_name to' $uploaddir_name
          echo "uploaddir_name=$uploaddir_name" >> $GITHUB_ENV
      - name: Archive k8s logs
        if: ${{ failure() }}
        uses: actions/upload-artifact@v3
        with:
          name: k8s-logs-${{ env.uploaddir_name }}
          path: ./k8ssandra-operator/build/test
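  # On pushes to master, publish a multi-arch image to Docker Hub tagged with the
  # short commit SHA and `master`, once all the jobs above have succeeded.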
  publish-docker-master:
    needs: [debian-build, build, integration-tests, k8ssandra-e2e-tests]
    if: github.event_name == 'push' && github.ref == 'refs/heads/master'
    runs-on: ubuntu-20.04
    steps:
      - uses: actions/checkout@v3
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2
        with:
          platforms: 'arm64,arm'
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Login to DockerHub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.K8SSANDRA_DOCKER_HUB_USERNAME }}
          password: ${{ secrets.K8SSANDRA_DOCKER_HUB_PASSWORD }}
      - name: Set git parsed values
        id: vars
        shell: bash
        run: |
          echo "sha_short=$(git rev-parse --short=8 ${{ github.sha }})" >> $GITHUB_OUTPUT
      - name: Build Medusa
        run: |
          echo "Publishing release $(git rev-parse --short HEAD) in Docker Hub"
          python setup.py build
      - name: Build image and push
        id: docker_build
        uses: docker/build-push-action@v4
        with:
          file: k8s/Dockerfile
          context: .
          push: true
          tags: k8ssandra/medusa:${{ steps.vars.outputs.sha_short }},k8ssandra/medusa:master
          platforms: linux/amd64,linux/arm64
          cache-from: type=local,src=/tmp/.buildx-cache
          cache-to: type=local,dest=/tmp/.buildx-cache
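  # Release pipeline: on a published GitHub release, rebuild the Debian package and
  # push it to the thelastpickle/medusa repository on Cloudsmith.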
  publish-debian:
    # We can only release if the build above succeeded first
    needs: [debian-build, build, integration-tests, k8ssandra-e2e-tests]
    if: github.event_name == 'release' && github.event.action == 'published'
    strategy:
      matrix:
        suite: [focal]
        include:
          - suite: focal
            os-version: ubuntu-20.04
      fail-fast: false
    runs-on: ${{ matrix.os-version }}
    steps:
      - uses: actions/checkout@v3
      - name: Install dependencies
        run: |
          sudo curl -L "https://github.com/docker/compose/releases/download/1.25.0/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
          sudo chmod +x /usr/local/bin/docker-compose
          sudo ln -s /usr/local/bin/docker-compose /usr/bin/docker-compose
      - name: Build Debian
        run: |
          version=$(cat VERSION)
          echo "VERSION=$version" >> $GITHUB_ENV
          cd packaging/docker-build
          docker-compose build "cassandra-medusa-builder-${{ matrix.suite }}" \
            && docker-compose run "cassandra-medusa-builder-${{ matrix.suite }}"
      - name: Push Debian to Cloudsmith
        id: push-deb
        uses: cloudsmith-io/action@master
        with:
          api-key: ${{ secrets.CLOUDSMITH_API_KEY }}
          command: 'push'
          format: 'deb'
          owner: 'thelastpickle'
          repo: 'medusa'
          distro: 'ubuntu'
          release: ${{ matrix.suite }}
          republish: 'true'
          file: "packages/cassandra-medusa_${{ env.VERSION }}-0~${{ matrix.suite }}0_amd64.deb"
  publish-docker:
    needs: [publish-debian]
    if: github.event_name == 'release' && github.event.action == 'published'
    runs-on: ubuntu-20.04
    steps:
      - uses: actions/checkout@v3
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2
        with:
          platforms: 'arm64,arm'
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1
      - name: Login to DockerHub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.K8SSANDRA_DOCKER_HUB_USERNAME }}
          password: ${{ secrets.K8SSANDRA_DOCKER_HUB_PASSWORD }}
      - name: Set git parsed values
        id: vars
        shell: bash
        run: |
          echo "version=$(cat VERSION)" >> $GITHUB_OUTPUT
      - name: Build Medusa
        run: |
          echo "Publishing release ${{ steps.vars.outputs.version }} in Docker Hub"
          python setup.py build
      - name: Build image and push
        id: docker_build
        uses: docker/build-push-action@v4
        with:
          file: k8s/Dockerfile
          context: .
          push: true
          tags: k8ssandra/medusa:${{ steps.vars.outputs.version }}
          platforms: linux/amd64,linux/arm64
          cache-from: type=local,src=/tmp/.buildx-cache
          cache-to: type=local,dest=/tmp/.buildx-cache
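  # Finally, build the sdist and wheel and upload the release to PyPI with twine.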
  publish-pypi:
    # We can only release if the build above succeeded first
    needs: [publish-docker]
    if: github.event_name == 'release' && github.event.action == 'published'
    runs-on: ubuntu-20.04
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python 3.7
        uses: actions/setup-python@v1
        with:
          python-version: 3.7
      - name: Install dependencies
        run: |
          python -m venv venv
          . venv/bin/activate
          python -m pip install --upgrade pip
      - name: Build and publish to pypi
        env:
          TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
          TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
        run: |
          . venv/bin/activate
          pip install setuptools wheel twine
          python setup.py sdist bdist_wheel
          twine upload dist/*