Use Rook Ceph for some Cinder jobs

Change-Id: I2af04eb6ad313593fb25f9430a00b4e2c5d503fb
Vladimir Kozhukalov 2023-12-05 20:44:33 -06:00
parent b52ceef053
commit 444f1fd161
8 changed files with 88 additions and 26 deletions

tools/deployment/component/ceph/ceph-adapter-rook.sh (new file)

@@ -0,0 +1,17 @@
#!/bin/bash
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
set -xe
cd ${OSH_INFRA_PATH:-"../openstack-helm-infra/"}; ./tools/deployment/ceph/ceph-adapter-rook.sh; cd -

tools/deployment/component/ceph/ceph-rook.sh (new file)

@@ -0,0 +1,17 @@
#!/bin/bash
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
set -xe
cd ${OSH_INFRA_PATH:-"../openstack-helm-infra/"}; ./tools/deployment/ceph/ceph-rook.sh; cd -
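Both new wrapper scripts follow the same pattern: default OSH_INFRA_PATH to a sibling openstack-helm-infra checkout and delegate to the corresponding Rook script there. A minimal usage sketch, assuming the two repositories are cloned side by side (the ~/src paths are hypothetical):

# Hypothetical layout: ~/src/openstack-helm next to ~/src/openstack-helm-infra
cd ~/src/openstack-helm
# OSH_INFRA_PATH only needs to be set when the infra checkout is not at the
# default ../openstack-helm-infra/ relative to the current directory.
OSH_INFRA_PATH=~/src/openstack-helm-infra ./tools/deployment/component/ceph/ceph-rook.sh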

tools/deployment/component/ceph/ceph.sh

@@ -16,10 +16,8 @@ set -xe
 export CEPH_ENABLED=true
-if [ "${CREATE_LOOPBACK_DEVICES_FOR_CEPH:=true}" == "true" ]; then
-  ./tools/deployment/common/setup-ceph-loopback-device.sh --ceph-osd-data ${CEPH_OSD_DATA_DEVICE:=/dev/loop0} \
-    --ceph-osd-dbwal ${CEPH_OSD_DB_WAL_DEVICE:=/dev/loop1}
-fi
+: ${CEPH_OSD_DATA_DEVICE:="/dev/loop100"}
+: ${POD_NETWORK_CIDR:="10.244.0.0/24"}
 #NOTE: Lint and package chart
 export HELM_CHART_ROOT_PATH="${HELM_CHART_ROOT_PATH:="${OSH_INFRA_PATH:="../openstack-helm-infra"}"}"
@@ -27,6 +25,8 @@ for CHART in ceph-mon ceph-osd ceph-client ceph-provisioners; do
   make -C ${HELM_CHART_ROOT_PATH} "${CHART}"
 done
+NUMBER_OF_OSDS="$(kubectl get nodes -l ceph-osd=enabled --no-headers | wc -l)"
 #NOTE: Deploy command
 [ -s /tmp/ceph-fs-uuid.txt ] || uuidgen > /tmp/ceph-fs-uuid.txt
 CEPH_FS_ID="$(cat /tmp/ceph-fs-uuid.txt)"
@@ -47,8 +47,8 @@ endpoints:
   ceph_mgr:
     namespace: ceph
 network:
-  public: 172.17.0.1/16
-  cluster: 172.17.0.1/16
+  public: "${POD_NETWORK_CIDR}"
+  cluster: "${POD_NETWORK_CIDR}"
 deployment:
   storage_secrets: true
   ceph: true
@@ -75,8 +75,8 @@ conf:
     crush:
       tunables: ${CRUSH_TUNABLES}
     target:
-      osd: 1
-      final_osd: 1
+      osd: ${NUMBER_OF_OSDS}
+      final_osd: ${NUMBER_OF_OSDS}
       pg_per_osd: 100
     default:
       crush_rule: same_host
@@ -166,13 +166,12 @@ conf:
       - data:
           type: bluestore
           location: ${CEPH_OSD_DATA_DEVICE}
-        block_db:
-          location: ${CEPH_OSD_DB_WAL_DEVICE}
-          size: "5GB"
-        block_wal:
-          location: ${CEPH_OSD_DB_WAL_DEVICE}
-          size: "2GB"
+        # block_db:
+        # location: ${CEPH_OSD_DB_WAL_DEVICE}
+        # size: "5GB"
+        # block_wal:
+        # location: ${CEPH_OSD_DB_WAL_DEVICE}
+        # size: "2GB"
 pod:
   replicas:
     mds: 1
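With this change the osd/final_osd targets track the number of nodes labeled ceph-osd=enabled instead of a hard-coded 1. A quick sanity check, sketched on the assumption that the gate's deploy-env playbook normally applies the label (the node names here are hypothetical):

# Label the storage nodes (in the gate this is assumed to be done by deploy-env):
for node in node-1 node-2 node-3; do
  kubectl label node "$node" ceph-osd=enabled --overwrite
done
# The same query the script runs; on a 3-node nodeset this should print 3:
kubectl get nodes -l ceph-osd=enabled --no-headers | wc -l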

tools/deployment/component/cinder/cinder.sh

@@ -27,19 +27,19 @@ conf:
     pools:
       backup:
         replication: 1
-        crush_rule: same_host
+        crush_rule: replicated_rule
         chunk_size: 8
         app_name: cinder-backup
       # default pool used by rbd1 backend
       cinder.volumes:
         replication: 1
-        crush_rule: same_host
+        crush_rule: replicated_rule
         chunk_size: 8
         app_name: cinder-volume
       # secondary pool used by rbd2 backend
       cinder.volumes.gold:
         replication: 1
-        crush_rule: same_host
+        crush_rule: replicated_rule
         chunk_size: 8
         app_name: cinder-volume
   backends:
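same_host is a gate-only CRUSH rule that lets all replicas land on a single node; on a 3-node cluster the pools can use Ceph's stock replicated_rule. A hedged way to verify the rules and pools once Rook is up (the rook-ceph namespace and rook-ceph-tools toolbox deployment are conventional Rook names, assumed here rather than taken from this change):

# replicated_rule is Ceph's default replicated CRUSH rule:
kubectl -n rook-ceph exec deploy/rook-ceph-tools -- ceph osd crush rule ls
# Confirm the cinder pools picked up the rule:
kubectl -n rook-ceph exec deploy/rook-ceph-tools -- ceph osd pool ls detail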

zuul.d/ (2023.1 job definitions)

@@ -45,7 +45,7 @@
 - job:
     name: openstack-helm-cinder-2023-1-ubuntu_focal
     parent: openstack-helm-cinder
-    nodeset: openstack-helm-1node-ubuntu_focal
+    nodeset: openstack-helm-3nodes-ubuntu_focal
     vars:
       osh_params:
         openstack_release: "2023.1"
@@ -54,8 +54,8 @@
 - job:
     name: openstack-helm-cinder-2023-1-ubuntu_jammy
-    parent: openstack-helm-cinder
-    nodeset: openstack-helm-1node-ubuntu_jammy
+    parent: openstack-helm-cinder-rook
+    nodeset: openstack-helm-3nodes-ubuntu_jammy
     vars:
       osh_params:
         openstack_release: "2023.1"

zuul.d/ (2023.2 job definitions)

@@ -34,8 +34,8 @@
 - job:
     name: openstack-helm-cinder-2023-2-ubuntu_jammy
-    parent: openstack-helm-cinder
-    nodeset: openstack-helm-1node-ubuntu_jammy
+    parent: openstack-helm-cinder-rook
+    nodeset: openstack-helm-3nodes-ubuntu_jammy
     vars:
       osh_params:
         openstack_release: "2023.2"

zuul.d/ (base job definitions)

@@ -47,6 +47,13 @@
       - tools/gate/playbooks/deploy-env.yaml
       - tools/gate/playbooks/run-scripts.yaml
     vars:
+      kubeadm:
+        pod_network_cidr: "10.244.0.0/24"
+        service_cidr: "10.96.0.0/16"
+      loopback_setup: true
+      loopback_device: /dev/loop100
+      loopback_image: "/var/lib/openstack-helm/ceph-loop.img"
+      ceph_osd_data_device: /dev/loop100
       # the k8s package versions are available here
       # https://packages.cloud.google.com/apt/dists/kubernetes-xenial/main/binary-amd64/Packages
       kube_version: "1.26.3-00"
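These vars move loopback provisioning out of the per-job Ceph script (compare the setup-ceph-loopback-device.sh block removed above) and into the deploy-env playbook. A rough sketch of the equivalent manual steps, assuming the playbook simply attaches a sparse image file via losetup (the 10G size is an assumption; the paths come from the vars above):

sudo mkdir -p /var/lib/openstack-helm
sudo truncate -s 10G /var/lib/openstack-helm/ceph-loop.img
sudo losetup /dev/loop100 /var/lib/openstack-helm/ceph-loop.img
losetup -l | grep loop100   # /dev/loop100 now backs the ceph-osd data device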
@@ -125,6 +132,28 @@
         - ./tools/deployment/component/cinder/cinder.sh
         - ./tools/deployment/common/force-cronjob-run.sh
+- job:
+    name: openstack-helm-cinder-rook
+    parent: openstack-helm-deploy
+    abstract: true
+    files:
+      - ^cinder/.*$
+      - ^zuul\.d/.*$
+      - ^tools/deployment/component/cinder/.
+    vars:
+      gate_scripts:
+        - ./tools/deployment/common/prepare-k8s.sh
+        - ./tools/deployment/common/setup-client.sh
+        - ./tools/deployment/component/ceph/ceph-rook.sh
+        - ./tools/deployment/component/ceph/ceph-adapter-rook.sh
+        - ./tools/deployment/component/common/ingress.sh
+        - ./tools/deployment/component/common/mariadb.sh
+        - ./tools/deployment/component/common/memcached.sh
+        - ./tools/deployment/component/common/rabbitmq.sh
+        - ./tools/deployment/component/keystone/keystone.sh
+        - ./tools/deployment/component/cinder/cinder.sh
+        - ./tools/deployment/common/force-cronjob-run.sh
 - job:
     name: openstack-helm-umbrella
     parent: openstack-helm-deploy
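Once the cinder.sh step of the new openstack-helm-cinder-rook job completes, a smoke test against the Rook-backed volume service might look like the following (the volume name is hypothetical; credentials are assumed to be configured by setup-client.sh):

openstack volume service list
openstack volume create --size 1 test-rook-volume
openstack volume show test-rook-volume -f value -c status   # expect: available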

zuul.d/ (project check/gate job lists)

@@ -34,14 +34,14 @@
         # 2023.1
         - openstack-helm-horizon-2023-1-ubuntu_jammy
         - openstack-helm-keystone-ldap-2023-1-ubuntu_focal
-        - openstack-helm-cinder-2023-1-ubuntu_focal
-        - openstack-helm-cinder-2023-1-ubuntu_jammy
+        - openstack-helm-cinder-2023-1-ubuntu_focal # 3 nodes
+        - openstack-helm-cinder-2023-1-ubuntu_jammy # 3 nodes rook
         - openstack-helm-compute-kit-2023-1-ubuntu_focal
         - openstack-helm-compute-kit-2023-1-ubuntu_jammy
         - openstack-helm-umbrella-2023-1-ubuntu_focal
         - openstack-helm-tls-2023-1-ubuntu_focal
         # 2023.2
-        - openstack-helm-cinder-2023-2-ubuntu_jammy
+        - openstack-helm-cinder-2023-2-ubuntu_jammy # 3 nodes rook
         - openstack-helm-compute-kit-2023-2-ubuntu_jammy
     gate:
       jobs: