- ubuntu 18.04
- ceph-ansible tag: v4.0.15
- grafana: 3000
- node exporter: 9100
- prometheus: 9091
- mgr: 9283
- dashboard: 8443
- object s3: 8080
# Fetch the workshop repo (ceph-ansible is vendored as a git submodule).
git clone https://github.com/tekchansin/cs100-workshop.git /opt/cs100
cd /opt/cs100
git submodule update --init
# Install pip/virtualenv from Ubuntu 18.04 packages (python-pip is the
# Python 2 pip — era-appropriate for ceph-ansible stable-4.0).
apt update
apt install python-pip virtualenv -y
# --system-site-packages: lets the venv see distro-installed python libs.
virtualenv /opt/cephenv --system-site-packages
source /opt/cephenv/bin/activate
# Install ansible and the other deps pinned by ceph-ansible v4.0.15.
pip install -r /opt/cs100/ceph-ansible/requirements.txt
- SSH to each storage node and wipe the disks that will become OSDs:
# DESTRUCTIVE: erase filesystem signatures on the future OSD disks.
# NOTE(review): /dev/sdg is skipped in this list — confirm that is intentional.
wipefs -a /dev/sdb /dev/sdc /dev/sdd /dev/sde /dev/sdf /dev/sdh /dev/sdi
# Generate a deployment key non-interactively (empty passphrase, default path).
ssh-keygen -t rsa -N '' -f ~/.ssh/id_rsa
# APPEND the public key: the original '>' clobbered any pre-existing
# authorized_keys entries for nc-user.
cat ~/.ssh/id_rsa.pub >> /home/nc-user/.ssh/authorized_keys
# -p: succeed even if the directory already exists (idempotent re-runs).
mkdir -p /etc/ansible
# Write a minimal ansible.cfg for the deployment.
# The heredoc delimiter is quoted ('EOF') so the shell never performs
# $variable or $(command) expansion inside the config body — the file is
# written byte-for-byte as shown.
cat > /etc/ansible/ansible.cfg <<'EOF'
[defaults]
host_key_checking=False
pipelining=True
forks=100
EOF
# Run the containerized ceph-ansible deployment.
# -b: become root on the target hosts; hosts.ini lives one level up.
cd /opt/cs100/ceph-ansible
ansible-playbook -b -i ../hosts.ini site-docker.yml.sample
# Open an interactive shell inside the active mgr container.
# NOTE: 'ceph -s' below is typed at the container's prompt, not in this
# shell — these two lines are an interactive transcript, not a script.
docker exec -it ceph-mgr-cephansible-1 bash
ceph -s
- docker exec -it ceph-mgr-cephserver-1 bash  (NOTE: container name differs from ceph-mgr-cephansible-1 used earlier — substitute your mgr host's actual container name)
- ceph dashboard set-ganesha-clusters-rados-pool-namespace cephfs_data
- ceph osd erasure-code-profile get default
- ceph osd erasure-code-profile set ecprofile k=2 m=1 crush-failure-domain=host
- ceph osd pool create ecpool 128 128 erasure ecprofile
(https://docs.ceph.com/docs/nautilus/rados/operations/cache-tiering/)
- ceph osd pool create {cachepool} 128 128
- ceph osd tier add {storagepool} {cachepool}
- ceph osd tier cache-mode {cachepool} writeback
- ceph osd pool set {cachepool} hit_set_type bloom
- ceph osd pool set {cachepool} cache_target_dirty_ratio 0.4
- ceph osd pool set {cachepool} cache_target_dirty_high_ratio 0.6
- ceph osd pool set {cachepool} cache_target_full_ratio 0.8
- ceph osd pool set {cachepool} cache_min_flush_age 600
- ceph osd pool set {cachepool} cache_min_evict_age 1800
- ceph osd tier cache-mode {cachepool} proxy
- wait until the cache tier has finished flushing dirty objects to the backing pool
- rados -p {cachepool} ls
- rados -p {cachepool} cache-flush-evict-all
- ceph osd tier cache-mode {cachepool} none
- ceph osd tier remove {storagepool} {cachepool}
- apt install cryptsetup -y  (the stack above is Ubuntu 18.04, which uses apt, not yum)
- cryptsetup benchmark
- rbd -p ecpool_cache bench test --io-type write --io-size 8192 --io-threads 256 --io-total 10G --io-pattern seq
https://docs.ceph.com/docs/master/mgr/alerts/
https://docs.ceph.com/docs/master/mgr/diskprediction/
https://docs.ceph.com/docs/master/mgr/zabbix/
# Configure the ceph-mgr 'zabbix' module.
# NOTE(review): per the Ceph mgr/zabbix documentation the module's config
# keys are 'zabbix_host' (the Zabbix server) and 'identifier' (the name this
# cluster reports as), not 'zabbix_server_ip'/'agent_hostname' — and
# config-set requires a value argument, which the original lines omitted.
# Placeholders use the {curly} convention from the cache-tier notes above.
ceph zabbix config-set zabbix_host {zabbix-server-ip-or-fqdn}
ceph zabbix config-set identifier {cluster-identifier}
# Inspect the effective module configuration.
ceph zabbix config-show
# Push one round of data now instead of waiting for the send interval.
ceph zabbix send
# Emit low-level discovery data for Zabbix item auto-creation.
ceph zabbix discovery